| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
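The sample rows below follow this layout: repository metadata, the raw file in `content`, and the per-file quality signals. As a minimal, hedged sketch of how such rows might be inspected (the local file name `code_rows.parquet` is a placeholder assumption; the real storage location is not given here):

```python
# Hedged sketch: load rows with the schema above from a local Parquet export
# and look at one record. "code_rows.parquet" is an assumed placeholder path.
import pandas as pd

df = pd.read_parquet("code_rows.parquet")

# One plausible filter: drop files that are mostly duplicated lines.
kept = df[df["qsc_code_frac_lines_dupe_lines_quality_signal"] < 0.5]

row = kept.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
print(row["content"][:200])           # raw source text of the file
print(row["effective"], row["hits"])  # bookkeeping columns at the end of each row
```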
Row 1
- hexsha: 5f7becb75aa2c7d062b9cadd32263fbdb828e255, size: 4,211, ext: py, lang: Python
- repo (stars/issues/forks identical): michael-kotliar/MAnorm, path: tests/test_region_parsers.py, head hexsha: df9f7159072d98743390197a6cfceb565c8f8e09, licenses: ["BSD-3-Clause"]
- max_stars_count: 22 (2017-12-27T06:46:25.000Z to 2021-01-22T03:22:19.000Z), max_issues_count: 22 (2018-01-15T09:32:50.000Z to 2021-11-07T07:42:40.000Z), max_forks_count: 8 (2017-11-08T13:16:20.000Z to 2021-08-03T07:23:46.000Z)
- content:
```python
import os
from manorm.region import load_manorm_peaks
def test_bed_parser(data_dir):
peaks = load_manorm_peaks(os.path.join(data_dir, 'test_peaks.bed'),
format='bed')
assert peaks.chroms == ['chr1', 'chr2', 'chr9']
assert peaks.size == 4
assert peaks.fetch('chr1')[0].start == 1
assert peaks.fetch('chr1')[0].end == 100
assert peaks.fetch('chr1')[0].summit == 50
assert peaks.fetch('chr1')[1].start == 2
assert peaks.fetch('chr1')[1].end == 200
assert peaks.fetch('chr1')[1].summit == 101
assert peaks.fetch('chr2')[0].start == 1
assert peaks.fetch('chr2')[0].end == 150
assert peaks.fetch('chr2')[0].summit == 75
assert peaks.fetch('chr9')[0].start == 5
assert peaks.fetch('chr9')[0].end == 123
assert peaks.fetch('chr9')[0].summit == 64
def test_bed3_summit_parser(data_dir):
peaks = load_manorm_peaks(os.path.join(data_dir, 'test_peaks_summit.bed'),
format='bed3-summit')
assert sorted(peaks.chroms) == ['chr1', 'chr2', 'chr9']
assert peaks.size == 4
assert peaks.fetch('chr1')[0].start == 1
assert peaks.fetch('chr1')[0].end == 100
assert peaks.fetch('chr1')[0].summit == 50
assert peaks.fetch('chr1')[1].start == 2
assert peaks.fetch('chr1')[1].end == 200
assert peaks.fetch('chr1')[1].summit == 100
assert peaks.fetch('chr2')[0].start == 1
assert peaks.fetch('chr2')[0].end == 150
assert peaks.fetch('chr2')[0].summit == 2
assert peaks.fetch('chr9')[0].start == 5
assert peaks.fetch('chr9')[0].end == 123
assert peaks.fetch('chr9')[0].summit == 55
def test_macs_parser(data_dir):
peaks = load_manorm_peaks(os.path.join(data_dir, 'test_peaks_macs.xls'),
format='macs')
assert sorted(peaks.chroms) == ['chr1', 'chr2', 'chr22']
assert peaks.size == 9
assert peaks.fetch('chr1')[0].start == 16192292
assert peaks.fetch('chr1')[0].end == 16193176
assert peaks.fetch('chr1')[0].summit == 16192491
assert peaks.fetch('chr1')[1].start == 17081409
assert peaks.fetch('chr1')[1].end == 17082059
assert peaks.fetch('chr1')[1].summit == 17081819
assert peaks.fetch('chr2')[0].start == 17082916
assert peaks.fetch('chr2')[0].end == 17084523
assert peaks.fetch('chr2')[0].summit == 17084177
assert peaks.fetch('chr22')[0].start == 17565233
assert peaks.fetch('chr22')[0].end == 17567384
assert peaks.fetch('chr22')[0].summit == 17565935
def test_macs2_parser(data_dir):
peaks = load_manorm_peaks(os.path.join(data_dir, 'test_peaks_macs2.xls'),
format='macs2')
assert sorted(peaks.chroms) == ['chr1', 'chr2', 'chr22']
assert peaks.size == 10
assert peaks.fetch('chr1')[0].start == 569795
assert peaks.fetch('chr1')[0].end == 570052
assert peaks.fetch('chr1')[0].summit == 569927
assert peaks.fetch('chr1')[1].start == 713873
assert peaks.fetch('chr1')[1].end == 714348
assert peaks.fetch('chr1')[1].summit == 714069
assert peaks.fetch('chr2')[0].start == 778179
assert peaks.fetch('chr2')[0].end == 778484
assert peaks.fetch('chr2')[0].summit == 778368
assert peaks.fetch('chr22')[0].start == 834127
assert peaks.fetch('chr22')[0].end == 834359
assert peaks.fetch('chr22')[0].summit == 834280
def test_narrowpeak_parser(data_dir):
peaks = load_manorm_peaks(os.path.join(data_dir, 'test_peaks.narrowPeak'),
format='narrowpeak')
assert sorted(peaks.chroms) == ['chr1', 'chr2', 'chr22']
assert peaks.size == 10
assert peaks.fetch('chr1')[0].start == 569795
assert peaks.fetch('chr1')[0].end == 570052
assert peaks.fetch('chr1')[0].summit == 569927
assert peaks.fetch('chr1')[1].start == 713873
assert peaks.fetch('chr1')[1].end == 714348
assert peaks.fetch('chr1')[1].summit == 714069
assert peaks.fetch('chr2')[0].start == 778179
assert peaks.fetch('chr2')[0].end == 778484
assert peaks.fetch('chr2')[0].summit == 778368
assert peaks.fetch('chr22')[0].start == 834127
assert peaks.fetch('chr22')[0].end == 834359
assert peaks.fetch('chr22')[0].summit == 834280
```
- avg_line_length: 42.535354, max_line_length: 78, alphanum_fraction: 0.626692
- qsc_* values (remaining columns, in schema order): 588 | 4,211 | 4.418367 | 0.12415 | 0.279446 | 0.369515 | 0.230947 | 0.874134 | 0.874134 | 0.749808 | 0.749808 | 0.749808 | 0.749808 | 0 | 0.131032 | 0.188079 | 4,211 | 98 | 79 | 42.969388 | 0.628839 | 0 | 0 | 0.563218 | 0 | 0 | 0.104488 | 0.009974 | 0 | 0 | 0 | 0 | 0.804598 | 1 | 0.057471 | false | 0 | 0.022989 | 0 | 0.08046 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 10
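The qsc_* columns are derived from the `content` string alone. A rough, hedged approximation of a few of them is sketched below; the exact definitions used to produce the values above are not given here, so recomputed numbers will differ (for example, blank lines may be treated differently).

```python
# Approximate re-computation of a few quality signals from a row's `content`.
# These definitions are illustrative assumptions, not the dataset's actual code.
def approx_signals(content: str) -> dict:
    lines = content.splitlines()
    n_lines = max(len(lines), 1)
    n_chars = max(len(content), 1)
    return {
        "num_lines": len(lines),
        "num_chars": len(content),
        "num_chars_line_max": max((len(line) for line in lines), default=0),
        "num_chars_line_mean": sum(len(line) for line in lines) / n_lines,
        "frac_chars_alphabet": sum(c.isalpha() for c in content) / n_chars,
        "frac_chars_whitespace": sum(c.isspace() for c in content) / n_chars,
        "frac_lines_assert": sum(line.lstrip().startswith("assert") for line in lines) / n_lines,
        "frac_lines_import": sum(line.lstrip().startswith(("import ", "from ")) for line in lines) / n_lines,
    }
```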
Row 2
- hexsha: 5f8759dfd7c8199e57ee96674e385021bb904e0c, size: 213, ext: py, lang: Python
- repo (stars/issues/forks identical): guanana/vanir, path: vanir/plugins/new_coin_bot/admin.py, head hexsha: b0bb9c874795a5803e6437ff0105ea036f1ae7b6, licenses: ["Apache-2.0"]
- max_stars_count: 1 (2022-01-19T07:11:05.000Z to 2022-01-19T07:11:05.000Z), max_issues_count: 10 (2021-11-07T14:17:07.000Z to 2022-03-30T18:24:48.000Z), max_forks_count: null
- content:
```python
from django.contrib import admin
# Register your models here.
from vanir.plugins.new_coin_bot.models import BinanceNewToken, NewCoinConfig
admin.site.register(NewCoinConfig)
admin.site.register(BinanceNewToken)
```
- avg_line_length: 26.625, max_line_length: 76, alphanum_fraction: 0.84507
- qsc_* values (remaining columns, in schema order): 27 | 213 | 6.592593 | 0.62963 | 0.202247 | 0.247191 | 0.337079 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084507 | 213 | 7 | 77 | 30.428571 | 0.912821 | 0.122066 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0, hits: 7
Row 3
- hexsha: 5fa81baf9cc11be5f8de249420fcee32d7c1076a, size: 2,815, ext: py, lang: Python
- repo (stars/issues/forks identical): orsinium-labs/deal-solver, path: tests/test_if.py, head hexsha: 8983f783b4a069cfd70b44e526e7cb14c237796a, licenses: ["MIT"]
- max_stars_count: 8 (2021-07-07T16:34:54.000Z to 2022-02-15T15:28:39.000Z), max_issues_count: null, max_forks_count: null
- content:
```python
from deal_solver import Conclusion
from .helpers import prove_f
def test_if_then():
theorem = prove_f("""
def f():
if True:
a = 2
else:
a = 3
assert a == 2
""")
assert theorem.conclusion is Conclusion.OK
def test_if_else():
theorem = prove_f("""
def f():
if False:
a = 2
else:
a = 3
assert a == 3
""")
assert theorem.conclusion is Conclusion.OK
def test_if_no_else():
theorem = prove_f("""
def f():
a = 3
if True:
a = 2
assert a == 2
""")
assert theorem.conclusion is Conclusion.OK
def test_if_else_return_from_if():
theorem = prove_f("""
@deal.post(lambda r: r)
def f():
if True:
return True
else:
return False
""")
assert theorem.conclusion is Conclusion.OK
def test_if_else_return_from_else():
theorem = prove_f("""
@deal.post(lambda r: r)
def f():
if False:
return False
else:
return True
""")
assert theorem.conclusion is Conclusion.OK
def test_if_return_from_if():
theorem = prove_f("""
@deal.post(lambda r: r)
def f():
if True:
return True
return False
""")
assert theorem.conclusion is Conclusion.OK
def test_if_return_after_if():
theorem = prove_f("""
@deal.post(lambda r: r)
def f():
if False:
return False
return True
""")
assert theorem.conclusion is Conclusion.OK
def test_if_then_shapes_assert_ok():
theorem = prove_f("""
def f(a: int):
if a > 10:
assert a > 0
""")
assert theorem.conclusion is Conclusion.OK
def test_if_then_shapes_assert_fail():
theorem = prove_f("""
def f(a: int):
if a > 0:
assert a > 10
""")
assert theorem.conclusion is Conclusion.FAIL
def test_if_else_shapes_assert_ok():
theorem = prove_f("""
def f(a: int):
if a > 0:
pass
else:
assert a < 10
""")
assert theorem.conclusion is Conclusion.OK
def test_if_dont_interrupt():
theorem = prove_f("""
def f(a: int):
if a > 0:
pass
assert False
""")
assert theorem.conclusion is Conclusion.FAIL
def test_if_unbound_var_skip():
theorem = prove_f("""
def f(a: int):
if a > 0:
x = 13
assert x
assert True
""")
assert theorem.conclusion is Conclusion.OK
```
- avg_line_length: 20.851852, max_line_length: 48, alphanum_fraction: 0.492007
- qsc_* values (remaining columns, in schema order): 333 | 2,815 | 3.975976 | 0.123123 | 0.058912 | 0.081571 | 0.226586 | 0.867825 | 0.867825 | 0.818731 | 0.770393 | 0.756798 | 0.614804 | 0 | 0.013309 | 0.412789 | 2,815 | 134 | 49 | 21.007463 | 0.787659 | 0 | 0 | 0.788991 | 0 | 0 | 0.506927 | 0 | 0 | 0 | 0 | 0 | 0.220183 | 1 | 0.110092 | false | 0.018349 | 0.018349 | 0 | 0.201835 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 8
Row 4
- hexsha: 5fd48d1fda45059c70a64bb70fce404e86c2159c, size: 116, ext: py, lang: Python
- repo (stars/issues/forks identical): irewolepeter/gsfpy_USM_Implementation, path: gsfpy/timespec.py, head hexsha: c4614ac3f7d833eb86ea38c7708108b130f96612, licenses: ["MIT"]
- max_stars_count: 7 (2020-07-01T07:12:19.000Z to 2022-01-20T20:39:57.000Z), max_issues_count: 36 (2020-06-23T09:10:15.000Z to 2022-03-22T10:27:58.000Z), max_forks_count: 2 (2021-02-07T13:21:52.000Z to 2021-06-24T19:16:16.000Z)
- content:
```python
from gsfpy import mirror_default_gsf_version_submodule
mirror_default_gsf_version_submodule(globals(), "timespec")
```
- avg_line_length: 29, max_line_length: 59, alphanum_fraction: 0.87931
- qsc_* values (remaining columns, in schema order): 15 | 116 | 6.266667 | 0.666667 | 0.276596 | 0.340426 | 0.489362 | 0.680851 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060345 | 116 | 3 | 60 | 38.666667 | 0.862385 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0, hits: 7
Row 5
- hexsha: 3993e863ff7a02806f36e463a2cd1da942cbcc97, size: 44, ext: pyw, lang: Python
- repo (stars/issues/forks identical): dlehman83/text2cc, path: make_gui_exe/text2cc_tk.pyw, head hexsha: 303798993590bceaeb5238a6cce82893c37cdfc7, licenses: ["BSD-3-Clause"]
- max_stars_count: 1 (2021-02-12T09:34:07.000Z to 2021-02-12T09:34:07.000Z), max_issues_count: null, max_forks_count: null
- content:
```python
import text2cc.gui.tk
text2cc.gui.tk.main()
```
- avg_line_length: 14.666667, max_line_length: 21, alphanum_fraction: 0.772727
- qsc_* values (remaining columns, in schema order): 8 | 44 | 4.25 | 0.625 | 0.588235 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04878 | 0.068182 | 44 | 2 | 22 | 22 | 0.780488 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0, hits: 7
Row 6
- hexsha: 39cf020c6906915dd290c1812fe91ad14f31133f, size: 12,929, ext: py, lang: Python
- repo (stars/issues/forks identical): Juniper/YAPT, path: lib/space/src.py, head hexsha: b1a54998867c70352001415d5e4b70408480dab9, licenses: ["BSD-3-Clause"]
- max_stars_count: 33 (2018-05-17T04:16:56.000Z to 2021-11-25T21:21:02.000Z), max_issues_count: 4 (2021-01-10T20:45:31.000Z to 2021-09-23T23:21:16.000Z), max_forks_count: 8 (2018-09-19T12:18:54.000Z to 2021-01-10T03:49:10.000Z)
- content:
```python
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
# Copyright (c) 2018 Juniper Networks, Inc.
# All rights reserved.
# Use is subject to license terms.
#
# Author: cklewar
import abc
import requests
from requests import Request, Session
from lib.logmsg import LogSpace as logmsg
import lib.constants as c
from lib.tools import Tools
requests.packages.urllib3.disable_warnings()
class SpaceRestConnector(object):
def __init__(self, space_ip=None, space_user=None, space_password=None):
self.logger = c.logger
self.__space_ip = space_ip
self.__space_user = space_user
self.__space_password = space_password
self.__space_session = Session()
self.__space_session.auth = (self.__space_user, self.__space_password)
self.__rest_timeout = c.conf.JUNOSSPACE.RestTimeout
def create_hornet_queue(self, queue):
URI = 'api/hornet-q/queues'
HEADER = {'Content-Type': 'application/hornetq.jms.queue+xml'}
BODY = '<queue name="{0}"><durable>false</durable></queue>'.format(queue)
response = self.post(URI, HEADER, BODY)
# Check status code to ensure Queue is present on space server
if response is not None:
if response.status_code == 201:
self.logger.info(Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_Q_CREATED.format(queue)))
return True
elif response.status_code == 412:
self.logger.info(Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_Q_ALREADY.format(queue)))
return True
else:
self.logger.info(Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_Q_FAILED.format(queue)))
return False
else:
return False
def post(self, uri, header, body):
space_uri = "{0}{1}".format("https://{0}/".format(self.__space_ip), uri)
req = Request('POST', url=space_uri, data=body, headers=header)
prepped = self.__space_session.prepare_request(req)
try:
response = self.__space_session.send(prepped, stream=None, verify=False, proxies=None, cert=None,
timeout=10.0)
self.logger.debug(
"RESTLIB: ##########################################---POST-BEGIN---##########################################\n")
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: URL: %s', str(space_uri))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Cookie: %s', str(self.__space_session.cookies))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Request Header: %s', str(req.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Header: %s', str(response.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Code: %s', str(response.status_code))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Content: %s', str(response.content))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Body Content: ')
# Todo: Print body info to logger
self.logger.debug(body)
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------\n")
self.logger.debug(
"RESTLIB: ##########################################---POST-END---############################################\n")
return response
except requests.exceptions.RequestException as err:
self.logger.info(Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_CONN_NOK.format(self.__space_ip, err)))
def get(self, uri, header):
space_uri = "{0}{1}".format("https://{0}/".format(self.__space_ip), uri)
req = Request('GET', url=space_uri, headers=header)
prepped = self.__space_session.prepare_request(req)
try:
response = self.__space_session.send(prepped, stream=None, verify=False, proxies=None, cert=None,
timeout=10.0)
self.logger.debug(
"RESTLIB: ##########################################---GET-BEGIN---##########################################\n")
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: URL: %s', str(space_uri))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Cookie: %s', str(self.__space_session.cookies))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Request Header: %s', str(req.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Header: %s', str(response.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Code: %s', str(response.status_code))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Content: %s', str(response.content))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug(
"RESTLIB: ##########################################---GET-END---############################################\n")
return response
except requests.exceptions.RequestException as err:
self.logger.info(
Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_CONN_NOK.format(self.__space_ip, err)))
def delete(self, uri, header):
space_uri = "{0}{1}".format("https://{0}/".format(self.__space_ip), uri)
req = Request('DELETE', url=space_uri, headers=header)
prepped = self.__space_session.prepare_request(req)
try:
response = self.__space_session.send(prepped, stream=None, verify=False, proxies=None, cert=None,
timeout=10.0)
self.logger.debug(
"RESTLIB: ##########################################---DELETE-BEGIN---##########################################\n")
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: URL: %s', str(space_uri))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Cookie: %s', str(self.__space_session.cookies))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Request Header: %s', str(req.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Header: %s', str(response.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Code: %s', str(response.status_code))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Content: %s', str(response.content))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug(
"RESTLIB: ##########################################---DELETE-END---############################################\n")
return response
except requests.exceptions.RequestException as err:
self.logger.info(
Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_CONN_NOK.format(self.__space_ip, err)))
def head(self, uri, header, body):
space_uri = "{0}{1}".format("https://{0}/".format(self.__space_ip), uri)
req = Request('HEAD', url=space_uri, data=body, headers=header)
prepped = self.__space_session.prepare_request(req)
try:
response = self.__space_session.send(prepped, stream=None, verify=False, proxies=None, cert=None,
timeout=10.0)
self.logger.debug(
"RESTLIB: ##########################################---HEAD-BEGIN---##########################################\n")
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: URL: %s', str(space_uri))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Cookie: %s', str(self.__space_session.cookies))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Request Header: %s', str(req.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Header: %s', str(response.headers))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Code: %s', str(response.status_code))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Response Content: %s', str(response.content))
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------")
self.logger.debug('RESTLIB: Body Content: ')
# Todo: log to logger
self.logger.debug(body)
self.logger.debug(
"RESTLIB: ----------------------------------------------------------------------------------------------\n")
self.logger.debug(
"RESTLIB: ##########################################---HEAD-END---############################################\n")
return response
except requests.exceptions.RequestException as err:
self.logger.info(
Tools.create_log_msg(logmsg.SPACE, None, logmsg.SPACEPLG_CONN_NOK.format(self.__space_ip, err)))
@abc.abstractmethod
def discover_by_space(self, sample_device=None, shared=None):
raise NotImplementedError()
@abc.abstractmethod
def discover_by_configlet(self, sample_device=None, shared=None):
raise NotImplementedError()
def end_session(self):
self.__space_session.close()
```
- avg_line_length: 54.783898, max_line_length: 132, alphanum_fraction: 0.414108
- qsc_* values (remaining columns, in schema order): 1,035 | 12,929 | 5.021256 | 0.142029 | 0.14239 | 0.190495 | 0.270926 | 0.800462 | 0.772369 | 0.772369 | 0.772369 | 0.751587 | 0.751587 | 0 | 0.003587 | 0.223683 | 12,929 | 235 | 133 | 55.017021 | 0.514197 | 0.022043 | 0 | 0.744681 | 0 | 0 | 0.380956 | 0.294285 | 0 | 0 | 0 | 0.004255 | 0 | 1 | 0.047872 | false | 0.015957 | 0.031915 | 0 | 0.12766 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 9
Row 7
- hexsha: 39dbfa23d3aaeccd4f4570efa1ab458eab0ba225, size: 134, ext: py, lang: Python
- repo (stars/issues/forks identical): createamind/stable-baselines, path: stable_baselines/sac1/__init__.py, head hexsha: 663f2cd71560c53ebe01e41e560386dd9568f50f, licenses: ["MIT"]
- max_stars_count: 1 (2020-10-20T06:13:15.000Z to 2020-10-20T06:13:15.000Z), max_issues_count: null, max_forks_count: null
- content:
```python
from stable_baselines.sac1.sac1 import SAC1
from stable_baselines.sac1.policies import MlpPolicy, CnnPolicy, LnMlpPolicy, LnCnnPolicy
```
- avg_line_length: 44.666667, max_line_length: 89, alphanum_fraction: 0.865672
- qsc_* values (remaining columns, in schema order): 17 | 134 | 6.705882 | 0.588235 | 0.175439 | 0.333333 | 0.403509 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.03252 | 0.08209 | 134 | 2 | 90 | 67 | 0.894309 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0, hits: 7
Row 8
- hexsha: f2e7a757adc8ce40455b0909ae7ef13b1a4f7b04, size: 120, ext: py, lang: Python
- repo (stars/issues/forks identical): mwk0408/codewars_solutions, path: 6 kyu/Last nonzero digit of factorial.py, head hexsha: 9b4f502b5f159e68024d494e19a96a226acad5e5, licenses: ["MIT"]
- max_stars_count: 6 (2020-09-03T09:32:25.000Z to 2020-12-07T04:10:01.000Z), max_issues_count: 1 (2021-12-13T15:30:21.000Z to 2021-12-13T15:30:21.000Z), max_forks_count: null
- content:
```python
def last_digit(n):
return 1 if n<=1 else 6*[1, 1, 2, 6, 4, 4, 4, 8, 4, 6][n%10]*3**(n//5%4)*last_digit(n//5)%10
```
- avg_line_length: 40, max_line_length: 96, alphanum_fraction: 0.508333
- qsc_* values (remaining columns, in schema order): 32 | 120 | 1.84375 | 0.46875 | 0.305085 | 0.338983 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.221053 | 0.208333 | 120 | 3 | 97 | 40 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0
- effective: 0, hits: 9
Row 9
- hexsha: fffc56239631cb80c9eaf81ea0817e255f890a90, size: 9,598, ext: py, lang: Python
- repo (stars/issues/forks identical): feiooo/games-puzzles-algorithms, path: old/lib/test/games/ttt/ttt_game_state_test.py, head hexsha: 66d97135d163fb04e820338068d9bd9e12d907e9, licenses: ["MIT"]
- max_stars_count: null, max_issues_count: null, max_forks_count: null
- content:
```python
from games_puzzles_algorithms.games.ttt.game_state import BoardValues, GameState
def assert_no_winner(patient):
assert(patient.score(BoardValues.X) is None)
assert(patient.score(BoardValues.O) is None)
assert(patient.winner() is None)
def assert_X_wins(patient):
assert(patient.score(BoardValues.X) is 1)
assert(patient.score(BoardValues.O) is -1)
assert(patient.winner() is BoardValues.X)
assert(patient.is_terminal())
assert(patient.num_legal_actions() == 0)
def assert_O_wins(patient):
assert(patient.score(BoardValues.X) is -1)
assert(patient.score(BoardValues.O) is 1)
assert(patient.winner() is BoardValues.O)
assert(patient.is_terminal())
assert(patient.num_legal_actions() == 0)
def test_m_x_n_board():
patient = GameState(2, 4)
assert_no_winner(patient)
assert(
str(patient) ==
"\n" +
" A B C D\n" +
"1 | | | \n" +
" -|-|-|-\n" +
"2 | | | \n"
)
def test_k_win():
patient = GameState(3, num_spaces_to_win=2)
assert_no_winner(patient)
assert(
str(patient) ==
"\n" +
" A B C\n" +
"1 | | \n" +
" -|-|-\n" +
"2 | | \n" +
" -|-|-\n" +
"3 | | \n"
)
patient.play(0)
patient.play(1)
patient.play(3)
assert(
str(patient) ==
"\n" +
" A B C\n" +
"1 X|X| \n" +
" -|-|-\n" +
"2 O| | \n" +
" -|-|-\n" +
"3 | | \n"
)
assert_X_wins(patient)
def test_empty_board():
'''Check that GameState instance is created with an empty board'''
patient = GameState(3)
assert_no_winner(patient)
assert(
str(patient) ==
"\n" +
" A B C\n" +
"1 | | \n" +
" -|-|-\n" +
"2 | | \n" +
" -|-|-\n" +
"3 | | \n"
)
def test_large_board_representation():
'''Check that large boards are represented clearly.'''
patient = GameState(10)
assert(
str(patient) ==
"\n" +
" A B C D E F G H I J\n" +
"1 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"2 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"3 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"4 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"5 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"6 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"7 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"8 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"9 | | | | | | | | | \n" +
" -|-|-|-|-|-|-|-|-|-\n" +
"10 | | | | | | | | | \n"
)
def test_first_player_to_move():
'''
Check that the player to move is X
'''
patient = GameState(3)
assert_no_winner(patient)
assert(patient.player_to_act() == BoardValues.X)
def test_moves():
'''
Check that a move can be taken by specifying a row and column, and
only empty spaces can be taken
'''
patient = GameState(3)
assert_no_winner(patient)
assert(
str(patient.play(patient._spaces.index(1, 1))) ==
"\n" +
" A B C\n" +
"1 | | \n" +
" -|-|-\n" +
"2 |X| \n" +
" -|-|-\n" +
"3 | | \n"
)
assert(patient.player_to_act() == BoardValues.O)
try: patient.play(patient._spaces.index(1, 1))
except IndexError: pass
else: raise "Should have raised IndexError"
def test_row_win():
'''
Check that the match is won properly on a row
'''
patient = GameState(3)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0)) \
.play(patient._spaces.index(1, 0)) \
.play(patient._spaces.index(0, 1)) \
.play(patient._spaces.index(1, 2)) \
.play(patient._spaces.index(0, 2))
assert_X_wins(patient)
def test_column_win():
'''
Check that the match is won properly on a column
'''
patient = GameState(3)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0)) \
.play(patient._spaces.index(1, 1)) \
.play(patient._spaces.index(1, 0)) \
.play(patient._spaces.index(1, 2)) \
.play(patient._spaces.index(2, 0))
assert_X_wins(patient)
def test_diag_1_win():
'''
Check that the match is won properly on first diagonal
(bottom left to top right)
'''
patient = GameState(3)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0)) \
.play(patient._spaces.index(1, 0)) \
.play(patient._spaces.index(1, 1)) \
.play(patient._spaces.index(1, 2)) \
.play(patient._spaces.index(2, 2))
assert_X_wins(patient)
def test_draw():
'''
Check that the match is drawn
'''
patient = GameState(3)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0)) \
.play(patient._spaces.index(1, 0)) \
.play(patient._spaces.index(0, 1)) \
.play(patient._spaces.index(1, 1)) \
.play(patient._spaces.index(1, 2)) \
.play(patient._spaces.index(0, 2)) \
.play(patient._spaces.index(2, 0)) \
.play(patient._spaces.index(2, 1)) \
.play(patient._spaces.index(2, 2))
assert(patient.is_terminal())
assert(patient.score(BoardValues.X) == 0)
assert(patient.is_terminal())
assert(patient.num_legal_actions() == 0)
def test_empty_undo():
'''
Check that undoing an empty board doesn't break.
'''
patient = GameState(3)
patient.undo()
assert(patient.player_to_act() == BoardValues.X)
def test_winner_after_undo():
'''
Check that undoing a move after a win no longer results in a win.
'''
patient = GameState(3)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0)) \
.play(patient._spaces.index(1, 0)) \
.play(patient._spaces.index(1, 1)) \
.play(patient._spaces.index(1, 2)) \
.play(patient._spaces.index(2, 2))
assert_X_wins(patient)
patient.undo()
assert_no_winner(patient)
assert(not patient.is_terminal())
assert(patient.num_legal_actions() == 5)
def test_win_detection():
patient = GameState(4, 6, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 5))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 3))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 2))
assert_O_wins(patient)
def test_win_detection_full_game():
patient = GameState(3, 3, 3)
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 2))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 0))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 0))
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 2))
assert_O_wins(patient)
def test_win_detection_full_game2():
patient = GameState(3, 3, 3)
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 0))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(2, 2))
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 2))
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0))
assert_O_wins(patient)
def test_row_win_k_in_a_row():
patient = GameState(3, 4, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0))
patient.play(patient._spaces.index(1, 0))
patient.play(patient._spaces.index(0, 1))
assert_X_wins(patient)
def test_column_win_k_in_a_row():
patient = GameState(3, 4, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0))
patient.play(patient._spaces.index(0, 1))
patient.play(patient._spaces.index(1, 0))
assert_X_wins(patient)
def test_diagonal_win_k_in_a_row():
patient = GameState(3, 4, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 0))
patient.play(patient._spaces.index(0, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
assert_X_wins(patient)
def test_anti_diagonal_win_k_in_a_row():
patient = GameState(3, 4, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 2))
patient.play(patient._spaces.index(0, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
assert_X_wins(patient)
def test_anti_diagonal_win_k_in_a_row_downwards():
patient = GameState(3, 4, 2)
assert_no_winner(patient)
patient.play(patient._spaces.index(1, 1))
patient.play(patient._spaces.index(0, 1))
assert_no_winner(patient)
patient.play(patient._spaces.index(0, 2))
assert_X_wins(patient)
def test_heuristic():
patient = GameState(3)
assert patient.heuristic(0) == 0
assert patient.heuristic(1) == 0
patient.play(patient._spaces.index(0, 0))
patient.play(patient._spaces.index(1, 0))
patient.play(patient._spaces.index(1, 1))
patient.play(patient._spaces.index(1, 2))
value = (5 - 1) / 24
assert patient.heuristic(0) == value
```
- avg_line_length: 28.39645, max_line_length: 80, alphanum_fraction: 0.581684
- qsc_* values (remaining columns, in schema order): 1,263 | 9,598 | 4.209818 | 0.098179 | 0.136543 | 0.211021 | 0.273086 | 0.812864 | 0.798759 | 0.762836 | 0.724845 | 0.691932 | 0.660147 | 0 | 0.030471 | 0.24776 | 9,598 | 337 | 81 | 28.480712 | 0.705956 | 0.058762 | 0 | 0.681818 | 0 | 0 | 0.087802 | 0.021248 | 0 | 0 | 0 | 0 | 0.310606 | 1 | 0.090909 | false | 0.003788 | 0.003788 | 0 | 0.094697 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 7
Row 10
- hexsha: 080dd87ef967eb8a145e532cfafc5e23f18f2a1d, size: 18,029, ext: py, lang: Python
- repo (stars/issues/forks identical): ClNo/safe-chicken, path: tests/test_controller.py, head hexsha: 8ee6d6746f9a660ae7f1ffb6a5483b459307e19e, licenses: ["MIT"]
- max_stars_count: 2 (2021-01-21T17:45:31.000Z to 2021-01-31T01:51:51.000Z), max_issues_count: null, max_forks_count: null
- content:
```python
from datetime import datetime
from safechicken import timecontrol
from safechicken.controller import Controller
class Dispatcher:
def __init__(self, current_datetime_iso):
self.expired = 0
self.erroro_code = 0
self.err_str = ''
self.command_out = {}
self.current_datetime = datetime.fromisoformat(current_datetime_iso)
# callback method
def notify_force_expired(self):
self.expired = 1
# callback method
def report_err(self, code, err_str):
self.erroro_code = code
self.err_str = err_str
def clear_err(self):
self.erroro_code = 0
self.err_str = ''
def command_changed(self, command_out):
self.command_out = command_out
def set_datetime_now(self, current_datetime_iso):
self.current_datetime = datetime.fromisoformat(current_datetime_iso)
def get_datetime_now(self):
# separate method as it is used for testing
return self.current_datetime
time_control_1 = {
"latitude": 47.03,
"longitude": 7.31,
"minutes_after_sunrise": 15,
"minutes_after_sunset": 15
}
controller_conf_1 = {
"start_action_delay": 1.5,
"force_time_expire_minutes": 180
}
expected_command_args = ['current', 'reason', 'next', 'next_time', 'reason_next']
def test_invalid_time():
# test an invalid system clock (too old)
dispatcher = Dispatcher('2019-12-31T23:00:00')
controller = Controller()
controller.init(controller_conf_1, dispatcher)
controller.set_static_time('9:00', '18:00')
controller.set_door_prio('static', 'static')
controller.set_force_operation('auto', None)
door_times, door_times_converted =\
timecontrol.recalc_door_times(time_control_1, datetime.fromisoformat('2019-12-31T23:00:00'), None, None)
assert door_times, 'Sun calculation (door_times) is not working'
assert door_times_converted, 'Sun calculation (door_times) is not working'
assert 'sunrise_open_time' in door_times_converted, 'Sun calculation (door_times) as no field "sunrise_open_time"'
assert 'sunset_close_time' in door_times_converted, 'Sun calculation (door_times) as no field "sunset_close_time"'
controller.set_sunbased_time(door_times_converted['sunrise_open_time'],
door_times_converted['sunset_close_time'])
assert dispatcher.erroro_code == 100, 'Date is invalid, but no error code set'
for arg in expected_command_args:
assert arg in dispatcher.command_out, 'Argument "{0}" not found in commant_out dict: {1}'.format(arg, dispatcher.command_out)
assert dispatcher.command_out[arg] is None, 'Argument "{0}" has to be "None" because date is invalid: {1}'.format(arg, dispatcher.command_out)
# now set a correct time
dispatcher.set_datetime_now('2020-01-01T00:00:00')
controller.recalc(systemtime_synced=True)
assert dispatcher.erroro_code == 0, 'Date is valid now, error code should be cleared'
for arg in ['next', 'next_time', 'reason_next']:
assert arg in dispatcher.command_out, 'Argument "{0}" not found in commant_out dict: {1}'.format(arg, dispatcher.command_out)
assert dispatcher.command_out[arg] is not None, 'Argument "{0}" has to be NOT "None" because date is valid: {1}'.format(arg, dispatcher.command_out)
def test_force_mode_static():
# expect that the completeness of the results is already tested
dispatcher = Dispatcher('2020-01-01T00:00:00')
controller = Controller()
controller.init(controller_conf_1, dispatcher)
controller.set_static_time('9:00', '18:00')
controller.set_door_prio('static', 'static')
controller.set_force_operation('auto', None)
# 1. set to static
door_times, door_times_converted =\
timecontrol.recalc_door_times(time_control_1, datetime.fromisoformat('2020-01-01T09:00:00'), None, None)
controller.set_sunbased_time(door_times_converted['sunrise_open_time'],
door_times_converted['sunset_close_time'])
controller.recalc(systemtime_synced=True)
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['reason'] is None, '"reason" should be set to none'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T09:00:00'),\
'"next_time" should be set to "2020-01-01T09:00:00"'
# 2. activate force mode open
controller.set_force_operation('open', '2020-01-01T00:00:00')
assert dispatcher.command_out['current'] == 'open', 'force mode set to "open", should be open now'
# advance time within force_time_expire_minutes
dispatcher.set_datetime_now('2020-01-01T02:59:00')
controller.recalc()
assert dispatcher.command_out['current'] == 'open', 'force mode set to "open", should STILL be open now'
# 3. advance to timeout (after force_time_expire_minutes)
dispatcher.set_datetime_now('2020-01-01T03:00:01')
controller.recalc()
assert dispatcher.command_out['current'] is None, 'force mode timeout. "current" should be None'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T09:00:00'),\
'"next_time" should be set to "2020-01-01T09:00:00"'
# 4. force open before static open
dispatcher.set_datetime_now('2020-01-01T08:30:00')
controller.set_force_operation('open', '2020-01-01T08:30:00')
assert dispatcher.command_out['current'] is 'open', 'force mode set to "open", should be open now'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
# 5. force mode should be still active after static open, but next should be 'close'
dispatcher.set_datetime_now('2020-01-01T09:01:00')
controller.recalc()
assert dispatcher.command_out['current'] is 'open', 'force mode set to "open", should be open now'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T18:00:00'),\
'"next_time" should be set to "2020-01-01T18:00:00"'
# 6. force mode timeout, static close should still be next
dispatcher.set_datetime_now('2020-01-01T11:30:01')
controller.recalc()
assert dispatcher.command_out['current'] is None, 'force mode timeout. "current" should be None'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T18:00:00'),\
'"next_time" should be set to "2020-01-01T18:00:00"'
# 7. force close
dispatcher.set_datetime_now('2020-01-01T17:00:00')
controller.set_force_operation('close', '2020-01-01T17:00:00')
assert dispatcher.command_out['current'] is 'close', 'force mode set to "open", should be close now'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T18:00:00'),\
'"next_time" should be set to "2020-01-01T18:00:00"'
# 8. advance after static close, force close still active
dispatcher.set_datetime_now('2020-01-01T18:01:00')
controller.recalc()
assert dispatcher.command_out['current'] is 'close', 'force mode set to "open", should be close now'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-02T09:00:00'),\
'"next_time" should be set to "2020-01-02T09:00:00"'
# 9. set to auto
controller.set_force_operation('auto', None)
assert dispatcher.command_out['current'] is None, 'force mode set to "auto", "current" should be None'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-02T09:00:00'),\
'"next_time" should be set to "2020-01-02T09:00:00"'
def test_sunbased_control():
# expect that the completeness of the results is already tested
dispatcher = Dispatcher('2020-01-01T00:00:00')
controller = Controller()
controller.init(controller_conf_1, dispatcher)
controller.set_static_time('9:00', '18:00')
controller.set_door_prio('sunbased', 'sunbased')
controller.set_force_operation('auto', None)
# 1. set to static
door_times, door_times_converted =\
timecontrol.recalc_door_times(time_control_1, datetime.fromisoformat('2020-01-01T09:00:00'), None, None)
# door_times_converted =
# {'sunrise_time': '08:17', 'sunset_time': '16:52', 'sunrise_open_time': '08:32', 'sunset_close_time': '17:07'}
controller.set_sunbased_time(door_times_converted['sunrise_open_time'],
door_times_converted['sunset_close_time'])
controller.recalc(systemtime_synced=True)
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['reason'] is None, '"reason" should be set to none'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T08:32:00'),\
'"next_time" should be set to "2020-01-01T08:32:00"'
# 2. activate force mode open
controller.set_force_operation('open', '2020-01-01T00:00:00')
assert dispatcher.command_out['current'] == 'open', 'force mode set to "open", should be open now'
# advance time within force_time_expire_minutes
dispatcher.set_datetime_now('2020-01-01T02:59:00')
controller.recalc()
assert dispatcher.command_out['current'] == 'open', 'force mode set to "open", should STILL be open now'
# 3. advance to timeout (after force_time_expire_minutes)
dispatcher.set_datetime_now('2020-01-01T03:00:01')
controller.recalc()
assert dispatcher.command_out['current'] is None, 'force mode timeout. "current" should be None'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T08:32:00'),\
'"next_time" should be set to "2020-01-01T08:32:00"'
# 4. force open before sunbased open
dispatcher.set_datetime_now('2020-01-01T08:15:00')
controller.set_force_operation('open', '2020-01-01T08:15:00')
assert dispatcher.command_out['current'] is 'open', 'force mode set to "open", should be open now'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
# 5. force mode should be still active after static open, but next should be 'close'
dispatcher.set_datetime_now('2020-01-01T08:45:00')
controller.recalc()
assert dispatcher.command_out['current'] is 'open', 'force mode set to "open", should be open now'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T17:07:00'),\
'"next_time" should be set to "2020-01-01T17:07:00"'
# 6. force mode timeout, static close should still be next
dispatcher.set_datetime_now('2020-01-01T11:15:01')
controller.recalc()
assert dispatcher.command_out['current'] is None, 'force mode timeout. "current" should be None'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T17:07:00'),\
'"next_time" should be set to "2020-01-01T17:07:00"'
# 7. force close
dispatcher.set_datetime_now('2020-01-01T17:00:00')
controller.set_force_operation('close', '2020-01-01T17:00:00')
assert dispatcher.command_out['current'] is 'close', 'force mode set to "open", should be close now'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "close"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T17:07:00'),\
'"next_time" should be set to "2020-01-01T17:07:00"'
# 8. advance after static close, force close still active
dispatcher.set_datetime_now('2020-01-01T18:01:00')
controller.recalc()
assert dispatcher.command_out['current'] is 'close', 'force mode set to "open", should be close now'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-02T08:32:00'),\
'"next_time" should be set to "2020-01-02T08:32:00"'
# 9. set to auto
controller.set_force_operation('auto', None)
assert dispatcher.command_out['current'] is None, 'force mode set to "auto", "current" should be None'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-02T08:32:00'),\
'"next_time" should be set to "2020-01-02T08:32:00"'
def test_sunbased_to_static():
# expect that the completeness of the results is already tested
dispatcher = Dispatcher('2020-01-01T00:00:00')
controller = Controller()
controller.init(controller_conf_1, dispatcher)
controller.set_static_time('9:00', '18:00')
controller.set_door_prio('sunbased', 'sunbased')
controller.set_force_operation('auto', None)
# 1. set to static
door_times, door_times_converted =\
timecontrol.recalc_door_times(time_control_1, datetime.fromisoformat('2020-01-01T09:00:00'), None, None)
# door_times_converted =
# {'sunrise_time': '08:17', 'sunset_time': '16:52', 'sunrise_open_time': '08:32', 'sunset_close_time': '17:07'}
controller.set_sunbased_time(door_times_converted['sunrise_open_time'],
door_times_converted['sunset_close_time'])
controller.recalc(systemtime_synced=True)
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T08:32:00'),\
'"next_time" should be set to "2020-01-01T08:32:00"'
# 2. open: change from sunbased to static
controller.set_door_prio('static', 'sunbased')
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['next'] == 'open', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T09:00:00'),\
'"next_time" should be set to "2020-01-01T09:00:00"'
# 3. change from static to sunbased when the time is over -> close should happen as next
dispatcher.set_datetime_now('2020-01-01T08:45:00')
controller.set_door_prio('sunbased', 'sunbased')
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'sunbased', '"reason_next" should be set to "sunbased"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T17:07:00'),\
'"next_time" should be set to "2020-01-01T17:07:00"'
# 4. switch close from static to sunbased
dispatcher.set_datetime_now('2020-01-01T17:00:00')
controller.set_door_prio('static', 'static')
assert dispatcher.command_out['current'] is None, '"current" should be set to none'
assert dispatcher.command_out['next'] == 'close', '"next" should be set to "open"'
assert dispatcher.command_out['reason_next'] == 'static', '"reason_next" should be set to "static"'
assert dispatcher.command_out['next_time'] == datetime.fromisoformat('2020-01-01T18:00:00'),\
'"next_time" should be set to "2020-01-01T18:00:00"'
```
- avg_line_length: 55.645062, max_line_length: 156, alphanum_fraction: 0.7037
- qsc_* values (remaining columns, in schema order): 2,536 | 18,029 | 4.826893 | 0.065852 | 0.078425 | 0.150315 | 0.182665 | 0.904665 | 0.889552 | 0.878196 | 0.86488 | 0.842333 | 0.835144 | 0 | 0.070238 | 0.161351 | 18,029 | 323 | 157 | 55.817337 | 0.739352 | 0.087914 | 0 | 0.770213 | 0 | 0 | 0.380813 | 0.025843 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.046809 | false | 0 | 0.012766 | 0.004255 | 0.068085 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0, hits: 8
f25ff4475b39f96c369a532921032792d62804a0
| 198
|
py
|
Python
|
mygame/dev/game/assets/level01.py
|
edgarjcfn/koding-game-starterkit
|
8034407476f12e41a3f1bd39c78551e29990d9d3
|
[
"MIT"
] | 3
|
2015-04-29T18:58:16.000Z
|
2019-05-29T07:54:32.000Z
|
mygame/dev/game/assets/level01.py
|
edgarjcfn/koding-game-starterkit
|
8034407476f12e41a3f1bd39c78551e29990d9d3
|
[
"MIT"
] | null | null | null |
mygame/dev/game/assets/level01.py
|
edgarjcfn/koding-game-starterkit
|
8034407476f12e41a3f1bd39c78551e29990d9d3
|
[
"MIT"
] | 1
|
2018-10-24T14:50:51.000Z
|
2018-10-24T14:50:51.000Z
|
import mygame
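# Presumably traces a small square: four repetitions of turnRight() followed by moveForward(1).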
char = mygame.Character();
char.turnRight();
char.moveForward(1);
char.turnRight();
char.moveForward(1);
char.turnRight();
char.moveForward(1);
char.turnRight();
char.moveForward(1);
| 16.5
| 26
| 0.742424
| 25
| 198
| 5.88
| 0.28
| 0.353742
| 0.462585
| 0.761905
| 0.789116
| 0.789116
| 0.789116
| 0.789116
| 0.789116
| 0.789116
| 0
| 0.021739
| 0.070707
| 198
| 11
| 27
| 18
| 0.777174
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
29b2eef08a99a46fc25ac729b7735f99d5ecf83b
| 4,511
|
py
|
Python
|
python/test/test_zero.py
|
EQt/treelas
|
24a5cebf101180822198806c0a4131b0efb7a36d
|
[
"MIT"
] | 3
|
2020-06-18T13:31:26.000Z
|
2021-04-05T17:42:56.000Z
|
python/test/test_zero.py
|
EQt/treelas
|
24a5cebf101180822198806c0a4131b0efb7a36d
|
[
"MIT"
] | null | null | null |
python/test/test_zero.py
|
EQt/treelas
|
24a5cebf101180822198806c0a4131b0efb7a36d
|
[
"MIT"
] | null | null | null |
"""
Test latent nodes (i.e. `mu[i] = 0`);
in particular that decreasing lambdas are handled correctly.
"""
import numpy as np
import pytest
from treelas import TreeInstance
from treelas._treelas import __asan__
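# __asan__ is presumably set when the extension was built with AddressSanitizer;
# the exception checks below are skipped in that case.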
def test_nan():
"""`y` contains `NaN`"""
t = TreeInstance(y=np.array( [0., np.nan, np.nan]),
mu=np.array([1., 0., 0.]),
lam=np.array([1.0, 0.5, 0.3]),
parent=np.array([1, 2, 2], dtype=np.int32),
root=2)
if __asan__:
return
with pytest.raises(RuntimeError) as e:
t.solve()
assert 'y[1] = nan' in str(e.value)
t.y = np.array([0., 0, 0])
t.mu = np.array([1., 0., np.nan])
with pytest.raises(RuntimeError) as e:
t.solve()
assert 'mu[2] = nan' in str(e.value)
def test_zero3():
"""Tree with 3 nodes and zero input"""
t = TreeInstance(y=np.array( [0., 0., 0.]),
mu=np.array([1., 0., 0.]),
lam=np.array([1.0, 0.5, 0.3]),
parent=np.array([1, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, 0)
alpha = t.dual
assert np.where(np.isnan(alpha))[0].tolist() == [t.root], \
f'alpha={alpha}, root={t.root}'
g = t.gamma
assert (g > -1e-10).all(), f'gamma={g}'
assert (g < +1e-10).all(), f'gamma={g}'
v = t.dual_diff
assert (v > -1e-10).all(), f'v={v}\nx={t.x}\nalpha={t.dual}\nlam={t.lam}'
def test_zero3b():
"""Tree with 3 nodes and zero input"""
t = TreeInstance(y=np.array( [0., 0., 3.]),
mu=np.array([1., 0., 0.]),
lam=np.array([1.0, 0.5, 0.3]),
parent=np.array([1, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, 0)
alpha = t.dual
assert np.where(np.isnan(alpha))[0].tolist() == [t.root], \
f'alpha={alpha}, root={t.root}'
g = t.gamma
assert (g > -1e-10).all(), f'gamma={g}'
assert (g < +1e-10).all(), f'gamma={g}'
v = t.dual_diff
assert (v > -1e-10).all(), f'v={v}\nx={t.x}\nalpha={t.dual}\nlam={t.lam}'
def test_zero3c():
"""Tree with 3 nodes and zero input"""
t = TreeInstance(y=np.array( [0., 0., 3.]),
mu=np.array([1., 0., 1.]),
lam=np.array([1.0, 0.5, 0.3]),
parent=np.array([1, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, [0.5, 0.5, 2.5])
alpha = t.dual
assert np.where(np.isnan(alpha))[0].tolist() == [t.root], \
f'alpha={alpha}, root={t.root}'
g = t.gamma
assert (g > -1e-10).all(), f'gamma={g}'
assert (g < +1e-10).all(), f'gamma={g}'
v = t.dual_diff
if False:
t.show(wait=False)
assert (v > -1e-10).all(), f'v={v}\nx={t.x}\nalpha={t.dual}\nlam={t.lam}'
def test_zero3d():
"""Tree with 3 nodes and zero input"""
t = TreeInstance(y=np.array( [0., 0., 3.]),
mu=np.array([1., 0., 1.]),
lam=np.array([0.5, 1.0, np.nan]),
parent=np.array([1, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, [0.5, 2.5, 2.5])
def test_zero4():
"""
Tree with 4 nodes, actually a line graph where just the two end
nodes contain information
"""
t = TreeInstance(y=np.array( [0., 0., 0., 2.]),
mu=np.array([1., 0., 0., 1.]),
lam=np.array([1.0, 0.3, np.nan, 1.0]),
parent=np.array([1, 2, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, [0.3, 0.3, 1.7, 1.7])
if False:
t.show(wait=False)
alpha = t.dual
assert np.where(np.isnan(alpha))[0].tolist() == [t.root], \
f'alpha={alpha}, root={t.root}'
g = t.gamma
assert (g > -1e-10).all(), f'gamma={g}'
assert (g < +1e-10).all(), f'gamma={g}'
v = t.dual_diff
assert (v > -1e-10).all(), f'v={v}\nx={t.x}\nalpha={t.dual}\nlam={t.lam}'
def test_nonzero():
"""
Similar to test_zero4 but with default mu == 1
"""
t = TreeInstance(y=np.array( [0., 0., 0., 2.]),
mu=np.array([1., 1., 1., 1.]),
lam=np.array([1.0, 0.3, np.nan, 1.0]),
parent=np.array([1, 2, 2, 2], dtype=np.int32),
root=2)
t.solve()
assert np.allclose(t.x, [0.15, 0.15, 0.7, 1.0])
| 33.414815
| 77
| 0.477278
| 728
| 4,511
| 2.928571
| 0.131868
| 0.098499
| 0.078799
| 0.054878
| 0.810038
| 0.791745
| 0.756098
| 0.756098
| 0.755159
| 0.714822
| 0
| 0.070377
| 0.307027
| 4,511
| 134
| 78
| 33.664179
| 0.611644
| 0.085569
| 0
| 0.75
| 0
| 0.038462
| 0.092903
| 0.042385
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.067308
| false
| 0
| 0.038462
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29df76aafdabcb013105fb74904f968e6799ef64
| 47
|
py
|
Python
|
mailserver/txmailserver/__init__.py
|
lp-programming/txmailserver
|
f68b01e5d062dfae0e8d872fb9c91d9bcea2323c
|
[
"MIT"
] | null | null | null |
mailserver/txmailserver/__init__.py
|
lp-programming/txmailserver
|
f68b01e5d062dfae0e8d872fb9c91d9bcea2323c
|
[
"MIT"
] | null | null | null |
mailserver/txmailserver/__init__.py
|
lp-programming/txmailserver
|
f68b01e5d062dfae0e8d872fb9c91d9bcea2323c
|
[
"MIT"
] | null | null | null |
from . import reactor
from . import reflection
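# expose the reactor and reflection submodules as attributes of the txmailserver package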
| 15.666667
| 24
| 0.787234
| 6
| 47
| 6.166667
| 0.666667
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 25
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9a53fbbde9da198f55950ae67a8c5243a637760
| 12,159
|
py
|
Python
|
Script-http-socks4/3.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
Script-http-socks4/3.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
Script-http-socks4/3.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
import marshal,zlib,base64,dis
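# The one-liner below base16-decodes the blob, zlib-decompresses it, unmarshals the
# resulting code object, and then only disassembles it with dis.dis() (it is never executed).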
exe=(marshal.loads(zlib.decompress(base64.b16decode("789CCD9CE9AE2CD95185B3078331982AC04D263954CEF39C9569030284C1603060066386964002D7950779C0EE4402FEC203F0427E363837C2DFF6BE4248EEE6DCDB741F55D7C9618F112B56ACD8A7BFE6FCE89F1FBA8EF3DB4FFFFDE0FB5F749CC7D3CF5BCEE36DE7F18EF378D7797CC279FC945CF969F9F593CEE367E4FBA79CC7CF3A8F9F932F4FAF7C5A7EFDB4DC7A7AE6E79DC7C5795C9DC72FC8F75F94CF5F721E9FF95F9A7A4F3EDF915F7F596E7DE6D5C6DF93EBDAC83B8CF05DB9F2499EFF9FBDBF27175DE7E1BDFABA7E7F7AE5579C87EF3C02E7113A8FC879DC9C472CDF13E7913A8FCC79E4CEA3E06E29772B69FCA9C15A5A7E7AB2711EAD74D74953BD5CB9C82B4F2D0CD2452EB706F919E5D6205DE4D2F853CB937494C8975E6E85D25D2B5F5C195B217767BA1EA4FD561E9BA5771D9527BD3FB5BCC863ABDCBAC9C551BE97F2BD97167AF9BEC8202B692D915F3706E9CB03B3DCD5F1643CEFCA2D5F9A1DA58B3B3F913C7F971733F9F2F4E42EA36AA49DBBACD8D3C54346E8CB94EFF26B26BF6AFB77F9B2C8A4F49995411672B795BB77C6AFE339E4DD963653E96B9796B5D344BEF8FCBAC997505A2818EA45AE2C3C59C997BBDCAA19EA2A8FDD79C697D7CDF70BCD66727D611D6EB20E39ADE93457567E660DAF32851B2F9A9986D6787CC650CA4FCB2DDDCA52A616CBC55EBED42C5126570E697395E575B918CB6847198F2B5BE9C9954DDAECD9534FDEADE4495D2557EE562CAC9A62286FCDD28EBE5BB0029BFC8CEC42252D54D26F850725F2965AE326CBD56212B90C2394065D7CD09567767931908E0E9674665EBBB49FC9C325CB98CBC5595CAC96EB6A691B7EA476A2CBBBCB1735F25AAE7B32CE511E6BE5968FD754ACE1453ABD486B0B2DB7B8B60EE62EBFBA96E5544C24C07E7A1656F167C341D40642ECBC96CF4A3C6EE1FACE807B797D93F5D9A5FD1DBF088038DDD0BBBC72E3B158DA89F1E28A4935B84CC6933376A84BA14B7495C99AC5D9D9B2D432C584EB23C3D8A5E551067C93EF3598A3C870C39E6BA07BC06B5246A5539EE5E1169B3146D5CAEB95DC4DC1FC1AE3514C58E57A2F53305E33C8DD1878BCB1771E0EBBCABE14F2802B0676A3D98C31A8B3543CA0E3DF68E78AFB67D676CCF2702AFB65B6BB958D7071C34C56E99076EE58726E21927A74425F313859B3B9A174614CA867F16B09258A7E0A7A37CCC3976673E957B1F72AEDDF6530B334726343032C7FB6607FB63A5A647891BCAEC8D0CB45DDE8164874D9BB1D48D4D8D7486B09E357F453875D69DF03E21669C1186D80AD86329D555666E3F99077239CE282EBA953AC18F62ABB5910853360AD071646B9BBB30807EE5CF0D8C00AF732121FA4CDA5FDEAD5A5CE718496A0B6482F1798C34C5889E4794F66DA6045A38C500D4C27AB1121626A3BEBA6D0B459C443CD2C66C56AC03607AC6648C5157FD7890CF25DE99371220F8A925A7E7761A17AF07F83575C2D3A9711C40331D48DF85812047371B40462B359DC496167B368C664C50B63F01EAD79329ED9C2A5115C55E2B7C8144A66BA03A1B9BC7B27BC2AE054585A80A754449F5CFA2D65B2BBDCEDA423C5555D8D1B43552E7195BB2577D5323B7C5C4D31E156CBA86218E908F9AC99696D05D900CC512F286476DAA0DAB0FA7B03F234B241830CA096C7DA57434F861565909308A634B1230A8937692DC4987779FE805A5C64C177AE6B232DD35153ACA1850AF22D66B060180B2DFBF26B0E315096E8B1D78BACF08421AD2CF284B95E587F8D5F29281760426A662DB353D332A947C37EA5F240C9C857B8B7CE5A8DA700883C50E2CA3074CA3D313D90062FD2B8FA82E6264A637C78FE2093BA30D359BACBA5BB94B0AE5B7967843AB006D756BF58C19310241F084926B5513F0AE4E18DC4A4C5C72BF9ECA45935B69298D8628ACA8A2342F08621150C7E676D07CC5803A2EEA04F1A15C330EFD057B31421315DCDA063E553F99EE071933C19B2A49B75EB0A04E9AA56F2700079530769D9A09450B812C82E6C90E1D505E0B6B02F0D54F680C6C730B48C0C6BC5A3270867077AEB26AA4977F265B4304127A5E1A9C60D0D615BB1A892B77C42D2C8325E09884AA23A66A409B22E66015A564C33C0A234D1AEE8A86076261D534C508ADEB1623DA1AA22E3ABD9D082CD32E857334E97D01C035F0769D14A363D211AD418C38895F6F2AB1254D50A34B328E47B0AF2A4102D25BA03965F82FC31595287B31BB84EB16ACDAC750B6EAC95C69191D54E65B43179C48D38B201111A2602E9E24240A960773AD498F5BFB1E92D72C795A11A256493E934A48D152EA98B1943C95C964843C60527AAE5E109CEE092D9696EAB1E5DB0DD153CBC00097368D886ADF6F2EB0407B8B1743DC4B827B58FC95916B9D281DE8B958CCCC0944E33E7BB261403D457459E0332AF70D4C1CD560CA607
753B606A2320CE246B33301E80961B7ED492482AC753332871259F5119C6D8B050235C7701A33A6047238EC74F0DA16A40D49874B243E39AAC9439274F99F01475C918B055496AC7001A505443C90D6A17621B9AACAD32CE59DAF479516DA0445D492DDD63970772BC2602F76618A961263E48D803FE0709D40C78D6C4C19AE0AB48E581D8310330D13362474A4BDCF319B6092E7778FBC6CAACD60E8E68263738520333D9110F95654556546A881409807F90FF2678871A6A429B9BB5BFBD3CB390114404D909AF5C113DD4781A6CB8919F80509282BDC6F8751FEFF2BD438AC9F002BDA89122E0E2284DDD09CD23661CA1B60DC045CF5EAB3DFBF2FC80D0A4FC73C2ECEF84008582BB251AB8D260009E1800DFC046854A65FB0B165B23ED6E34AE39FB86EABB421E3CB2D491F97AEC85C9460FE441CDC25C5AB8B17D2559F00D0ADA81841301E5407F5096AE6A58CE6A8C04D08C7D3998C881F4DAC3ED5760B3837B846CDF4E74E858F0031D4F89D98E71EA1687848F14813D2130E9062981BF40665C048185DCC72743543F1AA11619D97A8FB9AAD9EFB0CD12F1A706342A52B603D6BAA3865D818B8AF0D11358356558E03913967901136A96C2C5E5430053437C87EE1D118B6B0BFA36A85A06D34E51D83680AB22608578AB426507A13292E64412B491475F2C4BDBD98502D837D6E2C21CD4F80BA0C02CAF22DB4695C40C464DF120F437F86C82F43DE1293339D140A7EA770751E600F9150F4B62B4B1A819592F21852F08C41509C246DA6B4075664F1B8B285E884A07AA6C8214E3B232359DE6481C379C7A224AEEA4FC2DF6AF903EA073C6501D17DDAF451E69091F0D79D941516320306DE040CE080BBA18F1CA85CF1AAEB8405055FC31632B487C0E443CB34A2ECBEB61F91D125684F1E882076051C4306AAB9AA698BF91C626309C1ABA32A0E4A45681499FEC60623B66A3263741546652C5840C28A7B477651F4B485A85FBB888030D524F4A58292C2B5A492ED49C3C626E6C91A5CC0A79312090A3DE5F19B049BA473C254519D6BC6F04330F60A464D635D6D5E2AA6ABA83E5EF2B7DF930D8122963B29C3720AA2EB4636A0D3B60ABF0AEE1D8B5BC2045E5E8B95E31D38011DEC9643D086A0EF9292078B1B4A6FB1E925B5D499162565511A9673C2996D65A4CC327D1482D817AA29071854578F0D203C6AEB0E612D4165847C162CEA4B4116452D9428691A75CAFF0CD1A1FB90162E673837B4F56056DE496BAB052EB88095E98CB489ADF11E3348C4E40C1418C3659BC8FF4BD93946524531B79CD4C531A465510AEC1F6008B9D313FA33F1CF8B821C01DB66152C81597DC61143B7B1D428A22384C875E31E2652EF3BD8181777854C6628EE06D46884C598D0C336E489D12DC6D47E90A28EEF856F4C9194FC3620EC83509339D28667504530FF63290A375D4010F4655586C642638E6604248C9C3880C09C96047795D1F50D80CAD11AA1ED5902E25B889E2CC8D5D18E0D22BB8D460F63DC16282DEB816C7E8C9E26BC0D376408FDA4182F0EBCA907C92D3828AF040FDAE44BA1F89352D0429671146FA5D594C13466308640362EF5863C62234188F66AF39FB3291D14CA4631B44C2C4E88DA4C0B34046739F2BB598D0D2F13CAE97C8FEFAC001352AE82BC202132BEF36DA7E8D63F664F73E59644CD63040D453542C8F89F724803764F603625C40B6431ECB29CD77708086B87025160494D80670A3C5F76796D445D4F5B0D50154F1C80B0248664A927B674F4B507D402A4989530508D0A12A7410D7898E349474F4D8C23A760256823D54A4422B05230F3675A0CD4616E95A89B0252EDF60AE230853308691C4BF8278F8B0FDAB05833188AA62F24A50F330E614EBAAF1C11CCF8D89DD13E2E415D9218336DF99E6049C5EE4D6C86764894B35B1630440021CDC10F500073139F815FEE393C396ECCE88F03258A65E43183A747B8F90DA51098A99CB8E2F876419A60C3A63CC57D2AB81D56B31869D82E96C31CF882D4E18BF8764E47237669D1BFCEB8E7ADFC1A86F6C5C6429CC01B716ABBB8C22548406D2A1E84E963DEF083B01FB5BD0A9CF59A39D75EBC8432F448DC1D2A873EA9E1B7EB4A3604CECA681881D306901EA2B96766523428A4D1EA2D6C1D6DCE0813ECBAE0FB8708390F59911395BC85840C9C608651D42EB48BF353A6D48FC9A19D581D51D68DD1E4332276102625F6F6D53497673906E9780408731D4E4F2A5555C3395DF9114A02431BF00E92595BE1817EE50454A828E7D8C64077B4D21DEB3CEB0F57CE948EE2A6C6F266AB4448A0920CD2DBBED28864EA46F0B3ED5904CADE0C6020FF4A872CE28212B99FB48A23413DF3BF4A590C9E60817072C65B5AA9C3123B962721DDD8D806160253539A96287B2EDE1FE3E696F4A844AD1AC0254DC12FE1012B002EBE4C340244DA12E25DAA049E22654EB0A55D3F0FF0E19708734E688C92D7A9A51E90BB87402C17001ED9
A633C156C7607BD0744C880B54A488D036CEC00A30EB8A547E0AE18FFF6AA9BD764790144CB1C938838C75863E40D1B143223A37946AC58033FECA8AC19DDB2B08EE199329FD92097AA59CCC99399E74B8859C9FEAE586301E299D26782198F587240DED402B63BA6EB93F1991386A652B942901298D5CAECAE90AE8E3AE3810B74349293CA45AC584028B958758D3B95B28824744267D0EAD89535B921535C296AC764583B6CC727914C101B77B402237DB804DC8EB03E53EC1B89D13D933D2C3DB6218F6860CB2ED2C702156F500B7DECFF40878F61E34609EF19C6060EE480E740EE5910237CE8DF8CF35EF0FA1C7E78C3CE6352951BEA7ACBA18896D4FB6EC5EBD85AB4117BE880C7DC12C63D02DC0213D09C3101641A88538EED0500EF0273DEA0010B4D35448406253F810924145FCC295CA38C0D9484AEA4F3C6A812CC2C206169C1B7018A12A124779C2932E46D65AD360E1274ECC2CCD69852BEA99FE684E3CD52362AC4EA0392B0417D37CE578C249B17FC3125CC7548AF07F653436C6AA8B56FE5982B55B98CACCA64B5117D7556BD6FC1EAAEAF96E607C8BF4D9C06CBD73A02E80ECCDE98514A54F25F85DC0EF4883954DF02389D55A4B8C0EA6BCB0D7D5896A91D6F50BB94046D04100AEB6C4062655B1DB5FB981DD948876792CD0681D4049D82190D346BCE01268CCA9C595D2D05BBC17D3C4E3C1A4ED500471D19748CDD1ED6499282CCA26560BD75726CE578A18F776F64AC03B63D41934A8266C1984D2219111A2A56BB234B8A59C6C38A2C290272CDBE1F80AA11F07B1C2742DF8860262524ADB2229AA6871ECB15417B3662470063AC09850111FFCE8C568E05EE98AE398278608189958BEDA4E439317A812BE6E086CF5F6754109B16ECBD58C7EA02C2C485E89F4251222C7622E00E48765736ABC4C57C24BB3BB706CC72C5840660B627FA1FD09E1AED25B2CEBA646862151DA5A4789175F2ED06AFB859303860212BDB9193202CE41D3B67808DC566F86608604EE08F4D445DE61293C0969C3B8AADC3E1337BBA32600FEA78A5AC5681933DAE17C2227ACA07058949047C2D704553D10BF1A69A454ED95CDFD2787DA0C3472199499F5D002440DCEB99D1C0312753E05BD0AE0772D809572AD82073642EC0D202F079A64073E0C51E53DB81880B2CC226E42539B2497656C86182CD1CD6A1E2DA92640B38796AC99B21E94F81C075013CDB57A97248F1BDE7A87609735851A55AEB00798E8DB9C4C79B55B3080825052175665573942B5360BAC0F34D997BA0D854A03E0D5029977AD600119A89E6135A7D05EAD6F022CFC2E7D93A8011B04111263720069A64B6B2FEBE26C6895AABEED3F0E48E07957C5F21452987D96E047D755B1FE3D9087F777A3165A688A2F00C1E0684839EA4F246C65413DDAEA4E113AB1160331D32E941E5E5CAC98A11A5A26107CDF1C8105E37221019253684EE1630CF01B1B745B90D09102B967645315EAC34EAB032CD84E32E1300986190215E7CE784C308334910F073387083E3CC5062DDDF3B61B7B3944C43380BEBCF374A28BA0FD76D2973FB6C534721A321E20716AF3850E752C48482532B3B8DF4ACA18720DC10616FA4C625C5FA059A5AC1B2326299F96B328FD28C39337C800C1BFB5B430853EB20CAC25C76F84309189696F87C05CAAED43A3B52EC961602AB5EB910473CEBC4A339941270C27C80131696370D507173AE72848D9B53C119BFFA8403A3109A8D8E60773B21C0E36E83B34780FFF2EA0127174A668A7AB315B5CD89972BE6B7C0881A12DB1153BF410E770A6A13709459FA9EC1CF0EC7DFF0A61E7DC6B094922D0E41A4C952547CEB184C45E6E55A2CC5FCFDC58EF9AD2C4E6AC51D9FDC79B45841C1299A03DBDB71939261EC0CC990B19D13C819667965840B194165F167E32FCA365BC895A9459AF86232D989E99BBF99CA485232EBB4468E9F5EE1DB1946BB035621A32AACC321335424C1CC4AC262C5BEAC50A6840CDAC4D008B33C30D7CD2A261A7631C1C42E9CE4B993CE879C204DF1D30868BD59181EA160EF105A0F71A961017DD4E00A69C59C30CC2C19A1A19D15B334B95B409ABC430F6E8CB022FB3031CE67B2460369C91C4D1DF9467696006825C05B5A6793066AEE318EB06309A9A5C32710788F156BACD05C93084498B1879D9B2322174CE5823263EAEC038F55341E62BA2EE6B150BFB8918E851C2F5CAD93F90999C80C748FA0D3881456207B3660E964258657B6DB9C453155420DAF29407AA0BB46D0E09CFC71B77C6AC0791726D230788FE81C52494F69A7C70C56AA992E0BD5B3503D11BC2322E869BA92F315054E7A582AC795C31E263372AD9A910FAB0C891A13095743A0EF5F3D6BEA522A32E778279CDD449C05EF5808192301CB08473367170BE4411FAFC9903E46E2E342CD2B63FA317E1DE3F807467E477A0D39FC1C12774C89DCA3EB80810528962B995A4ACB1941E7028B0B6159133C7985
CE95E0CF9D78B713A96B4BD239F0E81E0C4CA8AEBAE4E9B3A57C9A01AC80D501A4AC149B7608E78C23AFD09501EF3830FBDE2ACC65442BA3F2359C4831C703326A4C937564AB42875FC9046BCB005A0A5EBE4563160B7817EBEE81766AAA3025E7EE12548E05D9D9A3BA11D2638B594E38F5681DB55A112D47048A028F33FF97861BEB99B298173C5A69E41DB7D5575CAA727714B33B1071E059A5750AD4275E873478C1D816FE4EC14760F79994CF79241365CC483288D64289E446472DFAA76FB1C19591F7D46B5CAE5C98EF9D88605A0B39C93323102D38D748DEB1211AACACC3CCDAFA84121F08EA2D00ACA8ECDC78FDCA7A2E8CAD851DC5CCFA0E73B8A36E9968E2592FFA109B3B59CF8819AB8CF659F95FC77CEEC73F5FFFA1FBBEE33C7ED5F9D65BCED71DE785E37CE1EF3CE7FDB79D17EF382FDE755EBCEDFCFB5B2F7FFEFE6DE7FB5FFAE2D397C7AF39EF7EF5E9E32BCDAF3B8E737EF3E9E35FDFD43FE777DE646FD2E33FBDE91E5F76FABD37DFE9F9838F61A24FDD9E1F4BB74F1D273F41C7E73FBC89D19D1F7C3C6B707EE30DF67B7EFB0D4FF21C3E6487E7BF7D4CA6F8A3EEFFF18D747FBE7853B33C3FFB917B3AF78FF8EAF9A9575E3CFFE563DCD2F36B6FA6F3B3F910FD9CD3871ED4F978438B78DE3F5A47E7F691DE3BB39FE8B5F3CF5EC3F4CFDF7D9E46CF3F7DFEC19DBFF75C6D9E7FFCCCA33BBFF87C0D9E5F7AD6C19D7FF15ABCE47CFFB53ADFF995D7D3FCF9D5D735ECF34F9EBBE5F3CBCF3ED6F3AF5FDFAE9D7FFE3ADA3EFFE635DAD9F9F9E768FCFCABD736C4F30F9FB3E9F38F9E77A0E7EF3C4B7BE7EF3FDBB0CEE3A33675FEEDEBC4B3F32F5F53EBE7E73E7AC3E71F3CE3A0CE2F3C5363E76FFE5F1B3ADB0FD3C2CB3CDFF972F389979AD15B4F1F8FF3DDA7CF6F7DEF9BDF955FBF79BEF3F4F9B56FFC40FEFBED17DF7D496A9D1F7F7CF0F6CB77BE7E7EF2E93FBFF19DEF3DFEF9DB2F7EEB65031FFCE7D3C77FFDBFFAF73F9CFF06673C4FEC"))))
dis.dis(exe)
| 3,039.75
| 12,114
| 0.998026
| 16
| 12,159
| 758.4375
| 0.625
| 0.000989
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.632168
| 0.000329
| 12,159
| 3
| 12,115
| 4,053
| 0.366187
| 0
| 0
| 0
| 0
| 0
| 0.991529
| 0.991529
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
d9aac6e9c228d980b5896494b700ac61866fe397
| 181
|
py
|
Python
|
platform/hwconf_data/mgm11/modules/PIN/PIN_Snippets.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/mgm11/modules/PIN/PIN_Snippets.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/mgm11/modules/PIN/PIN_Snippets.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
"""
Generated from a template
"""
import mgm11.PythonSnippet.RuntimeModel as RuntimeModel
from mgm11.modules.PIN.PIN_Defs import PORT_PINS
def activate_runtime():
pass
| 10.647059
| 55
| 0.756906
| 23
| 181
| 5.826087
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02649
| 0.165746
| 181
| 16
| 56
| 11.3125
| 0.860927
| 0.138122
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
d9f1055674b269d01e6bfe1866f55f7f42b73139
| 211,863
|
py
|
Python
|
msgraph/cli/command_modules/sites/azext_sites/generated/_params.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
msgraph/cli/command_modules/sites/azext_sites/generated/_params.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | 22
|
2022-03-29T22:54:37.000Z
|
2022-03-29T22:55:27.000Z
|
msgraph/cli/command_modules/sites/azext_sites/generated/_params.py
|
microsoftgraph/msgraph-cli-archived
|
489f70bf4ede1ce67b84bfb31e66da3e4db76062
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
# pylint: disable=too-many-statements
from azure.cli.core.commands.parameters import get_three_state_flag
from azure.cli.core.commands.validators import validate_file_or_dict
from azext_sites.action import (
AddSharepointIds,
AddApplication,
AddResources,
AddDetails,
AddInnerError,
AddCalculated,
AddChoice,
AddDateTime,
AddDefaultValue,
AddLookup,
AddNumber,
AddPersonOrGroup,
AddText,
AddOrder,
AddColumnLinks,
AddStoragePlanInformation,
AddList,
AddSubscriptions,
AddContentType,
AddVersions,
AddPublication,
AddSitesOnenoteNotebooksSectiongroupsSectionsPagesCommands,
AddSitesOnenoteNotebooksSectionsPagesCommands,
AddSitesOnenotePagesCommands,
AddSitesOnenotePagesParentnotebookSectiongroupsSectionsPagesCommands,
AddSitesOnenotePagesParentnotebookSectionsPagesCommands,
AddSitesOnenotePagesParentsectionPagesCommands,
AddSitesOnenoteSectiongroupsParentnotebookSectionsPagesCommands,
AddSitesOnenoteSectiongroupsSectionsPagesCommands,
AddCommands
)
def load_arguments(self, _):
with self.argument_context('sites group create-site') as c:
c.argument('group_id', type=str, help='key: id of group')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
with self.argument_context('sites group delete-site') as c:
c.argument('group_id', type=str, help='key: id of group')
c.argument('site_id', type=str, help='key: id of site')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites group list-site') as c:
c.argument('group_id', type=str, help='key: id of group')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites group show-site') as c:
c.argument('group_id', type=str, help='key: id of group')
c.argument('site_id', type=str, help='key: id of site')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites group update-site') as c:
c.argument('group_id', type=str, help='key: id of group')
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
with self.argument_context('sites sitessite create-site') as c:
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
with self.argument_context('sites sitessite delete-site') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites sitessite list-site') as c:
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites sitessite show-site') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites sitessite update-site') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
with self.argument_context('sites site list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('path', type=str, help='')
with self.argument_context('sites site create') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
with self.argument_context('sites site update') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('site_id1', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The full title for the site. Read-only.')
c.argument('root', type=validate_file_or_dict, help='root Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of column definitions reusable across '
'lists under this site. Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types defined for this '
'site. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('drives', type=validate_file_or_dict, help='The collection of drives (document libraries) under '
'this site. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='Used to address any item contained in this site. This '
'collection cannot be enumerated. Expected value: json-string/json-file/@json-file.')
c.argument('lists', type=validate_file_or_dict, help='The collection of lists under this site. Expected value: '
'json-string/json-file/@json-file.')
c.argument('sites', type=validate_file_or_dict, help='The collection of the sub-sites under this site. '
'Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Onenote')
c.argument('notebooks', type=validate_file_or_dict, help='The collection of OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('operations', type=validate_file_or_dict, help='The status of OneNote operations. Getting an '
'operations collection is not supported, but you can get the status of long-running operations if '
'the Operation-Location header is returned in the response. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('pages', type=validate_file_or_dict, help='The pages in all OneNote notebooks that are owned by the '
'user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('resources', action=AddResources, nargs='+', help='The image and other file resources in OneNote '
'pages. Getting a resources collection is not supported, but you can get the binary content of a '
'specific resource. Read-only. Nullable.', arg_group='Onenote')
c.argument('section_groups', type=validate_file_or_dict, help='The section groups in all OneNote notebooks '
'that are owned by the user or group. Read-only. Nullable. Expected value: '
'json-string/json-file/@json-file.', arg_group='Onenote')
c.argument('sections', type=validate_file_or_dict, help='The sections in all OneNote notebooks that are owned '
'by the user or group. Read-only. Nullable. Expected value: json-string/json-file/@json-file.',
arg_group='Onenote')
c.argument('data_location_code', type=str, help='The geographic region code for where this site collection '
'resides. Read-only.', arg_group='Site Collection')
c.argument('hostname', type=str, help='The hostname for the site collection. Read-only.', arg_group='Site '
'Collection')
c.argument('microsoft_graph_root', type=validate_file_or_dict, help='root Expected value: '
'json-string/json-file/@json-file.', arg_group='Site Collection')
c.argument('code', type=str, help='', arg_group='Error')
c.argument('details', action=AddDetails, nargs='+', help='', arg_group='Error')
c.argument('inner_error', action=AddInnerError, nargs='+', help='publicInnerError', arg_group='Error')
c.argument('message', type=str, help='', arg_group='Error')
c.argument('target', type=str, help='', arg_group='Error')
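# Illustrative sketch for the 'sites site update' context above. The command shape and option
# names are assumptions inferred from the registered argument names and may differ in the
# packaged extension:
#   az sites site update --site-id {siteId} --display-name "Marketing" \
#       --description "Team site for marketing"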
with self.argument_context('sites site add') as c:
c.argument('value', type=validate_file_or_dict, help=' Expected value: json-string/json-file/@json-file.')
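# Hedged sketch for 'sites site add': --value is parsed by validate_file_or_dict, so it accepts
# an inline JSON string, a path to a JSON file, or @file syntax. The payload shape is an
# assumption based on the Microsoft Graph site resource:
#   az sites site add --value @new-sites.json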
with self.argument_context('sites site create-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('boolean', type=validate_file_or_dict, help='booleanColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('calculated', action=AddCalculated, nargs='+', help='calculatedColumn')
c.argument('choice', action=AddChoice, nargs='+', help='choiceColumn')
c.argument('column_group', type=str, help='For site columns, the name of the group this column belongs to. '
'Helps organize related columns.')
c.argument('date_time', action=AddDateTime, nargs='+', help='dateTimeColumn')
c.argument('default_value', action=AddDefaultValue, nargs='+', help='defaultColumnValue')
c.argument('description', type=str, help='The user-facing description of the column.')
c.argument('display_name', type=str, help='The user-facing name of the column.')
c.argument('enforce_unique_values', arg_type=get_three_state_flag(), help='If true, no two list items may have '
'the same value for this column.')
c.argument('geolocation', type=validate_file_or_dict, help='geolocationColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Specifies whether the column is displayed in the '
'user interface.')
c.argument('indexed', arg_type=get_three_state_flag(), help='Specifies whether the column values can be used for '
'sorting and searching.')
c.argument('lookup', action=AddLookup, nargs='+', help='lookupColumn')
c.argument('name', type=str, help='The API-facing name of the column as it appears in the [fields][] on a '
'[listItem][]. For the user-facing name, see displayName.')
c.argument('number', action=AddNumber, nargs='+', help='numberColumn')
c.argument('person_or_group', action=AddPersonOrGroup, nargs='+', help='personOrGroupColumn')
c.argument('read_only', arg_type=get_three_state_flag(), help='Specifies whether the column values can be '
'modified.')
c.argument('required', arg_type=get_three_state_flag(), help='Specifies whether the column value is required '
'(not optional).')
c.argument('text', action=AddText, nargs='+', help='textColumn')
c.argument('locale', type=str, help='Specifies the locale from which to infer the currency symbol.',
arg_group='Currency')
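# Hedged sketch for 'sites site create-column'. Options backed by Add* argparse actions
# (for example --text via AddText) conventionally take space-separated KEY=VALUE tokens in
# these generated extensions; the exact keys shown are assumptions, not confirmed names:
#   az sites site create-column --site-id {siteId} --name Title --display-name "Title" \
#       --text allow-multiple-lines=false max-length=255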
with self.argument_context('sites site create-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('description', type=str, help='The descriptive text for the item.')
c.argument('group', type=str, help='The name of the group this content type belongs to. Helps organize related '
'content types.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Indicates whether the content type is hidden in '
'the list\'s \'New\' menu.')
c.argument('name', type=str, help='The name of the content type.')
c.argument('order', action=AddOrder, nargs='+', help='contentTypeOrder')
c.argument('parent_id', type=str, help='The unique identifier of the content type.')
c.argument('read_only', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified '
'unless this value is first set to false.')
c.argument('sealed', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified by '
'users or through push-down operations. Only site collection administrators can seal or unseal '
'content types.')
c.argument('column_links', action=AddColumnLinks, nargs='+', help='The collection of columns that are required '
'by this content type')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Inherited From')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Inherited From')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Inherited From')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Inherited From')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Inherited '
'From')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Inherited From')
with self.argument_context('sites site create-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_drive_type', type=str, help='Describes the type of drive represented by this '
'resource. OneDrive personal drives will return personal. OneDrive for Business will return '
'business. SharePoint document libraries will return documentLibrary. Read-only.')
c.argument('share_point_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('system', type=validate_file_or_dict, help='systemFacet Expected value: '
'json-string/json-file/@json-file.')
c.argument('following', type=validate_file_or_dict, help='The list of items the user is following. Only in '
'OneDrive for Business. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='All items contained in the drive. Read-only. Nullable. '
'Expected value: json-string/json-file/@json-file.')
c.argument('list', type=validate_file_or_dict, help='list Expected value: json-string/json-file/@json-file.')
c.argument('root', type=validate_file_or_dict, help='driveItem Expected value: json-string/json-file/@json-file'
'.')
c.argument('special', type=validate_file_or_dict, help='Collection of common folders available in OneDrive. '
'Read-only. Nullable. Expected value: json-string/json-file/@json-file.')
c.argument('deleted', type=int, help='Total space consumed by files in the recycle bin, in bytes. Read-only.',
arg_group='Quota')
c.argument('remaining', type=int, help='Total space remaining before reaching the quota limit, in bytes. '
'Read-only.', arg_group='Quota')
c.argument('state', type=str, help='Enumeration value that indicates the state of the storage space. '
'Read-only.', arg_group='Quota')
c.argument('storage_plan_information', action=AddStoragePlanInformation, nargs='+',
help='storagePlanInformation', arg_group='Quota')
c.argument('total', type=int, help='Total allowed storage space, in bytes. Read-only.', arg_group='Quota')
c.argument('used', type=int, help='Total space used, in bytes. Read-only.', arg_group='Quota')
c.argument('application1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('device1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('user1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
with self.argument_context('sites site create-list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The displayable title of the list.')
c.argument('list', action=AddList, nargs='+', help='listInfo')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('system', type=validate_file_or_dict, help='systemFacet Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of field definitions for this list. '
'Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types present in this '
'list. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='All items contained in the list. Expected value: '
'json-string/json-file/@json-file.')
c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The set of subscriptions on the list.')
with self.argument_context('sites site delete-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('if_match', type=str, help='ETag')
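# Hedged sketch for the delete-* commands: --if-match passes an ETag for optimistic
# concurrency; using '*' as a wildcard is an assumption, not a value documented here:
#   az sites site delete-column --site-id {siteId} --column-definition-id {columnDefinitionId} \
#       --if-match "*"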
with self.argument_context('sites site delete-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites site delete-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('drive_id', type=str, help='key: id of drive')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites site delete-list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites site delete-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites site delete-site') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('site_id1', type=str, help='key: id of site')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites site list-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site list-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site list-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site list-list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
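# Hedged sketch of the shared OData query options on the list-* commands; --orderby, --select
# and --expand are assumed to map to the $orderby, $select and $expand query parameters:
#   az sites site list-list --site-id {siteId} --select id displayName --orderby displayName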
with self.argument_context('sites site remove') as c:
c.argument('value', type=validate_file_or_dict, help=' Expected value: json-string/json-file/@json-file.')
with self.argument_context('sites site set-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: '
'json-string/json-file/@json-file.')
with self.argument_context('sites site show-activity') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('start_date_time', type=str, help='')
c.argument('end_date_time', type=str, help='')
c.argument('interval', type=str, help='')
with self.argument_context('sites site show-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('drive_id', type=str, help='key: id of drive')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show-list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site show-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
with self.argument_context('sites site show-site') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('site_id1', type=str, help='key: id of site')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites site update-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('boolean', type=validate_file_or_dict, help='booleanColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('calculated', action=AddCalculated, nargs='+', help='calculatedColumn')
c.argument('choice', action=AddChoice, nargs='+', help='choiceColumn')
c.argument('column_group', type=str, help='For site columns, the name of the group this column belongs to. '
'Helps organize related columns.')
c.argument('date_time', action=AddDateTime, nargs='+', help='dateTimeColumn')
c.argument('default_value', action=AddDefaultValue, nargs='+', help='defaultColumnValue')
c.argument('description', type=str, help='The user-facing description of the column.')
c.argument('display_name', type=str, help='The user-facing name of the column.')
c.argument('enforce_unique_values', arg_type=get_three_state_flag(), help='If true, no two list items may have '
'the same value for this column.')
c.argument('geolocation', type=validate_file_or_dict, help='geolocationColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Specifies whether the column is displayed in the '
'user interface.')
c.argument('indexed', arg_type=get_three_state_flag(), help='Specifies whether the column values can be used for '
'sorting and searching.')
c.argument('lookup', action=AddLookup, nargs='+', help='lookupColumn')
c.argument('name', type=str, help='The API-facing name of the column as it appears in the [fields][] on a '
'[listItem][]. For the user-facing name, see displayName.')
c.argument('number', action=AddNumber, nargs='+', help='numberColumn')
c.argument('person_or_group', action=AddPersonOrGroup, nargs='+', help='personOrGroupColumn')
c.argument('read_only', arg_type=get_three_state_flag(), help='Specifies whether the column values can be '
'modified.')
c.argument('required', arg_type=get_three_state_flag(), help='Specifies whether the column value is required '
'(not optional).')
c.argument('text', action=AddText, nargs='+', help='textColumn')
c.argument('locale', type=str, help='Specifies the locale from which to infer the currency symbol.',
arg_group='Currency')
with self.argument_context('sites site update-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('description', type=str, help='The descriptive text for the item.')
c.argument('group', type=str, help='The name of the group this content type belongs to. Helps organize related '
'content types.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Indicates whether the content type is hidden in '
'the list\'s \'New\' menu.')
c.argument('name', type=str, help='The name of the content type.')
c.argument('order', action=AddOrder, nargs='+', help='contentTypeOrder')
c.argument('parent_id', type=str, help='The unique identifier of the content type.')
c.argument('read_only', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified '
'unless this value is first set to false.')
c.argument('sealed', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified by '
'users or through push-down operations. Only site collection administrators can seal or unseal '
'content types.')
c.argument('column_links', action=AddColumnLinks, nargs='+', help='The collection of columns that are required '
'by this content type')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Inherited From')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Inherited From')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Inherited From')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Inherited From')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Inherited '
'From')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Inherited From')
with self.argument_context('sites site update-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('drive_id', type=str, help='key: id of drive')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('microsoft_graph_item_reference_drive_id', type=str, help='Unique identifier of the drive instance '
'that contains the item. Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_drive_type', type=str, help='Describes the type of drive represented by this '
'resource. OneDrive personal drives will return personal. OneDrive for Business will return '
'business. SharePoint document libraries will return documentLibrary. Read-only.')
c.argument('share_point_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('system', type=validate_file_or_dict, help='systemFacet Expected value: '
'json-string/json-file/@json-file.')
c.argument('following', type=validate_file_or_dict, help='The list of items the user is following. Only in '
'OneDrive for Business. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='All items contained in the drive. Read-only. Nullable. '
'Expected value: json-string/json-file/@json-file.')
c.argument('list', type=validate_file_or_dict, help='list Expected value: json-string/json-file/@json-file.')
c.argument('root', type=validate_file_or_dict, help='driveItem Expected value: json-string/json-file/@json-file'
'.')
c.argument('special', type=validate_file_or_dict, help='Collection of common folders available in OneDrive. '
'Read-only. Nullable. Expected value: json-string/json-file/@json-file.')
c.argument('deleted', type=int, help='Total space consumed by files in the recycle bin, in bytes. Read-only.',
arg_group='Quota')
c.argument('remaining', type=int, help='Total space remaining before reaching the quota limit, in bytes. '
'Read-only.', arg_group='Quota')
c.argument('state', type=str, help='Enumeration value that indicates the state of the storage space. '
'Read-only.', arg_group='Quota')
c.argument('storage_plan_information', action=AddStoragePlanInformation, nargs='+',
help='storagePlanInformation', arg_group='Quota')
c.argument('total', type=int, help='Total allowed storage space, in bytes. Read-only.', arg_group='Quota')
c.argument('used', type=int, help='Total space used, in bytes. Read-only.', arg_group='Quota')
c.argument('application1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('device1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('user1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
with self.argument_context('sites site update-list') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('display_name', type=str, help='The displayable title of the list.')
c.argument('list', action=AddList, nargs='+', help='listInfo')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('system', type=validate_file_or_dict, help='systemFacet Expected value: '
'json-string/json-file/@json-file.')
c.argument('columns', type=validate_file_or_dict, help='The collection of field definitions for this list. '
'Expected value: json-string/json-file/@json-file.')
c.argument('content_types', type=validate_file_or_dict, help='The collection of content types present in this '
'list. Expected value: json-string/json-file/@json-file.')
c.argument('drive', type=validate_file_or_dict,
help='drive Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='All items contained in the list. Expected value: '
'json-string/json-file/@json-file.')
c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The set of subscriptions on the list.')
with self.argument_context('sites sitescontenttype create-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('name', type=str, help='The name of the column in this content type.')
with self.argument_context('sites sitescontenttype delete-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites sitescontenttype list-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites sitescontenttype show-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites sitescontenttype update-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('name', type=str, help='The name of the column in this content type.')
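# Hedged sketch for the contentType column-link commands above; the identifiers are placeholders:
#   az sites sitescontenttype create-column-link --site-id {siteId} \
#       --content-type-id {contentTypeId} --name Title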
with self.argument_context('sites siteslist create-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('boolean', type=validate_file_or_dict, help='booleanColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('calculated', action=AddCalculated, nargs='+', help='calculatedColumn')
c.argument('choice', action=AddChoice, nargs='+', help='choiceColumn')
c.argument('column_group', type=str, help='For site columns, the name of the group this column belongs to. '
'Helps organize related columns.')
c.argument('date_time', action=AddDateTime, nargs='+', help='dateTimeColumn')
c.argument('default_value', action=AddDefaultValue, nargs='+', help='defaultColumnValue')
c.argument('description', type=str, help='The user-facing description of the column.')
c.argument('display_name', type=str, help='The user-facing name of the column.')
c.argument('enforce_unique_values', arg_type=get_three_state_flag(), help='If true, no two list items may have '
'the same value for this column.')
c.argument('geolocation', type=validate_file_or_dict, help='geolocationColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Specifies whether the column is displayed in the '
'user interface.')
c.argument('indexed', arg_type=get_three_state_flag(), help='Specifies whether the column values can be used for '
'sorting and searching.')
c.argument('lookup', action=AddLookup, nargs='+', help='lookupColumn')
c.argument('name', type=str, help='The API-facing name of the column as it appears in the [fields][] on a '
'[listItem][]. For the user-facing name, see displayName.')
c.argument('number', action=AddNumber, nargs='+', help='numberColumn')
c.argument('person_or_group', action=AddPersonOrGroup, nargs='+', help='personOrGroupColumn')
c.argument('read_only', arg_type=get_three_state_flag(), help='Specifies whether the column values can be '
'modified.')
c.argument('required', arg_type=get_three_state_flag(), help='Specifies whether the column value is required '
'(not optional).')
c.argument('text', action=AddText, nargs='+', help='textColumn')
c.argument('locale', type=str, help='Specifies the locale from which to infer the currency symbol.',
arg_group='Currency')
with self.argument_context('sites siteslist create-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('description', type=str, help='The descriptive text for the item.')
c.argument('group', type=str, help='The name of the group this content type belongs to. Helps organize related '
'content types.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Indicates whether the content type is hidden in '
'the list\'s \'New\' menu.')
c.argument('name', type=str, help='The name of the content type.')
c.argument('order', action=AddOrder, nargs='+', help='contentTypeOrder')
c.argument('parent_id', type=str, help='The unique identifier of the content type.')
c.argument('read_only', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified '
'unless this value is first set to false.')
c.argument('sealed', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified by '
'users or through push-down operations. Only site collection administrators can seal or unseal '
'content types.')
c.argument('column_links', action=AddColumnLinks, nargs='+', help='The collection of columns that are required '
'by this content type')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Inherited From')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Inherited From')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Inherited From')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Inherited From')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Inherited '
'From')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Inherited From')
with self.argument_context('sites siteslist create-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('content_type_', options_list=['--content-type'], action=AddContentType, nargs='+',
help='contentTypeInfo')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('drive_item', type=validate_file_or_dict, help='driveItem Expected value: '
'json-string/json-file/@json-file.')
c.argument('versions', action=AddVersions, nargs='+', help='The list of previous versions of the list item.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Fields')
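# Hedged sketch for 'sites siteslist create-item'. --content-type uses the AddContentType
# action, which is assumed to take KEY=VALUE tokens like the other Add* actions; the key name
# shown is an assumption:
#   az sites siteslist create-item --site-id {siteId} --list-id {listId} \
#       --content-type id={contentTypeId}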
with self.argument_context('sites siteslist create-subscription') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('application_id', type=str, help='Identifier of the application used to create the subscription. '
'Read-only.')
c.argument('change_type', type=str, help='Required. Indicates the type of change in the subscribed resource '
'that will raise a change notification. The supported values are: created, updated, deleted. '
'Multiple values can be combined using a comma-separated list. Note: Drive root item and list change '
'notifications support only the updated changeType. User and group change notifications support '
'updated and deleted changeType.')
c.argument('client_state', type=str, help='Optional. Specifies the value of the clientState property sent by '
'the service in each change notification. The maximum length is 128 characters. The client can '
'check that the change notification came from the service by comparing the value of the clientState '
'property sent with the subscription with the value of the clientState property received with each '
'change notification.')
c.argument('creator_id', type=str, help='Identifier of the user or service principal that created the '
'subscription. If the app used delegated permissions to create the subscription, this field '
'contains the id of the signed-in user the app called on behalf of. If the app used application '
'permissions, this field contains the id of the service principal corresponding to the app. '
'Read-only.')
c.argument('encryption_certificate', type=str, help='A base64-encoded representation of a certificate with a '
'public key used to encrypt resource data in change notifications. Optional. Required when '
'includeResourceData is true.')
c.argument('encryption_certificate_id', type=str, help='A custom app-provided identifier to help identify the '
'certificate needed to decrypt resource data. Optional.')
c.argument('expiration_date_time', help='Required. Specifies the date and time when the webhook subscription '
'expires. The time is in UTC, and can be an amount of time from subscription creation that varies '
'for the resource subscribed to. See the Microsoft Graph subscription documentation for the '
'maximum supported subscription lifetime per resource.')
c.argument('include_resource_data', arg_type=get_three_state_flag(), help='When set to true, change '
'notifications include resource data (such as content of a chat message). Optional.')
c.argument('latest_supported_tls_version', type=str, help='')
c.argument('lifecycle_notification_url', type=str, help='')
c.argument('notification_url', type=str, help='Required. The URL of the endpoint that will receive the change '
'notifications. This URL must make use of the HTTPS protocol.')
c.argument('resource', type=str, help='Required. Specifies the resource that will be monitored for changes. Do '
'not include the base URL (https://graph.microsoft.com/v1.0/). See the possible resource path '
'values for each supported resource.')
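# Hedged sketch for 'sites siteslist create-subscription'; values are placeholders, and Graph
# requires an HTTPS notification URL plus an expiration within the supported window for lists:
#   az sites siteslist create-subscription --site-id {siteId} --list-id {listId} \
#       --change-type updated --notification-url https://contoso.example/api/webhook \
#       --expiration-date-time 2021-01-01T00:00:00Z --client-state secretClientValue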
with self.argument_context('sites siteslist delete-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslist delete-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslist delete-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslist delete-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslist delete-subscription') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('subscription_id', type=str, help='key: id of subscription', id_part='subscription')
c.argument('if_match', type=str, help='ETag')
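# 'sites siteslist list-*' commands: entity keys plus the OData query arguments (--orderby, --select, --expand).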
with self.argument_context('sites siteslist list-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist list-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist list-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist list-subscription') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
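# 'sites siteslist show-*' commands: entity keys plus --select/--expand for shaping the returned entity.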
with self.argument_context('sites siteslist show-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist show-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist show-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist show-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslist show-subscription') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('subscription_id', type=str, help='key: id of subscription', id_part='subscription')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
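# 'sites siteslist update-column': general columnDefinition properties plus the per-type column facets
# (boolean, calculated, choice, dateTime, geolocation, lookup, number, personOrGroup, text, currency locale).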
with self.argument_context('sites siteslist update-column') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('column_definition_id', type=str, help='key: id of columnDefinition')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('boolean', type=validate_file_or_dict, help='booleanColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('calculated', action=AddCalculated, nargs='+', help='calculatedColumn')
c.argument('choice', action=AddChoice, nargs='+', help='choiceColumn')
c.argument('column_group', type=str, help='For site columns, the name of the group this column belongs to. '
'Helps organize related columns.')
c.argument('date_time', action=AddDateTime, nargs='+', help='dateTimeColumn')
c.argument('default_value', action=AddDefaultValue, nargs='+', help='defaultColumnValue')
c.argument('description', type=str, help='The user-facing description of the column.')
c.argument('display_name', type=str, help='The user-facing name of the column.')
c.argument('enforce_unique_values', arg_type=get_three_state_flag(), help='If true, no two list items may have '
'the same value for this column.')
c.argument('geolocation', type=validate_file_or_dict, help='geolocationColumn Expected value: '
'json-string/json-file/@json-file.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Specifies whether the column is displayed in the '
'user interface.')
c.argument('indexed', arg_type=get_three_state_flag(), help='Specifies whether the column values can be used for '
'sorting and searching.')
c.argument('lookup', action=AddLookup, nargs='+', help='lookupColumn')
c.argument('name', type=str, help='The API-facing name of the column as it appears in the [fields][] on a '
'[listItem][]. For the user-facing name, see displayName.')
c.argument('number', action=AddNumber, nargs='+', help='numberColumn')
c.argument('person_or_group', action=AddPersonOrGroup, nargs='+', help='personOrGroupColumn')
c.argument('read_only', arg_type=get_three_state_flag(), help='Specifies whether the column values can be '
'modified.')
c.argument('required', arg_type=get_three_state_flag(), help='Specifies whether the column value is not '
'optional.')
c.argument('text', action=AddText, nargs='+', help='textColumn')
c.argument('locale', type=str, help='Specifies the locale from which to infer the currency symbol.',
arg_group='Currency')
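# 'sites siteslist update-content-type': contentType properties; the 'Inherited From' arg group holds the
# flattened itemReference fields (drive id/type, item id/name, path, share id, sharepointIds).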
with self.argument_context('sites siteslist update-content-type') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('description', type=str, help='The descriptive text for the item.')
c.argument('group', type=str, help='The name of the group this content type belongs to. Helps organize related '
'content types.')
c.argument('hidden', arg_type=get_three_state_flag(), help='Indicates whether the content type is hidden in '
'the list\'s \'New\' menu.')
c.argument('name', type=str, help='The name of the content type.')
c.argument('order', action=AddOrder, nargs='+', help='contentTypeOrder')
c.argument('parent_id', type=str, help='The unique identifier of the content type.')
c.argument('read_only', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified '
'unless this value is first set to false.')
c.argument('sealed', arg_type=get_three_state_flag(), help='If true, the content type cannot be modified by '
'users or through push-down operations. Only site collection administrators can seal or unseal '
'content types.')
c.argument('column_links', action=AddColumnLinks, nargs='+', help='The collection of columns that are required '
'by this content type')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Inherited From')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Inherited From')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Inherited From')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Inherited From')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Inherited From')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Inherited '
'From')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Inherited From')
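# 'sites siteslist update-drive': drive properties; nested objects are flattened into the 'Parent Reference',
# 'Created By', 'Last Modified By', 'Quota' and 'Owner' arg groups.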
with self.argument_context('sites siteslist update-drive') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_drive_type', type=str, help='Describes the type of drive represented by this '
'resource. OneDrive personal drives will return personal. OneDrive for Business will return '
'business. SharePoint document libraries will return documentLibrary. Read-only.')
c.argument('share_point_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('system', type=validate_file_or_dict, help='systemFacet Expected value: '
'json-string/json-file/@json-file.')
c.argument('following', type=validate_file_or_dict, help='The list of items the user is following. Only in '
'OneDrive for Business. Expected value: json-string/json-file/@json-file.')
c.argument('items', type=validate_file_or_dict, help='All items contained in the drive. Read-only. Nullable. '
'Expected value: json-string/json-file/@json-file.')
c.argument('list', type=validate_file_or_dict, help='list Expected value: json-string/json-file/@json-file.')
c.argument('root', type=validate_file_or_dict, help='driveItem Expected value: '
'json-string/json-file/@json-file.')
c.argument('special', type=validate_file_or_dict, help='Collection of common folders available in OneDrive. '
'Read-only. Nullable. Expected value: json-string/json-file/@json-file.')
c.argument('deleted', type=int, help='Total space consumed by files in the recycle bin, in bytes. Read-only.',
arg_group='Quota')
c.argument('remaining', type=int, help='Total space remaining before reaching the quota limit, in bytes. '
'Read-only.', arg_group='Quota')
c.argument('state', type=str, help='Enumeration value that indicates the state of the storage space. '
'Read-only.', arg_group='Quota')
c.argument('storage_plan_information', action=AddStoragePlanInformation, nargs='+',
help='storagePlanInformation', arg_group='Quota')
c.argument('total', type=int, help='Total allowed storage space, in bytes. Read-only.', arg_group='Quota')
c.argument('used', type=int, help='Total space used, in bytes. Read-only.', arg_group='Quota')
c.argument('application1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('device1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
c.argument('user1', action=AddApplication, nargs='+', help='identity', arg_group='Owner')
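# 'sites siteslist update-item': listItem properties, with the same flattened 'Parent Reference' /
# 'Created By' / 'Last Modified By' groups plus content type, fields and version arguments.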
with self.argument_context('sites siteslist update-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('created_date_time', help='Date and time of item creation. Read-only.')
c.argument('description', type=str, help='Provides a user-visible description of the item. Optional.')
c.argument('e_tag', type=str, help='ETag for the item. Read-only.')
c.argument('last_modified_date_time', help='Date and time the item was last modified. Read-only.')
c.argument('name', type=str, help='The name of the item. Read-write.')
c.argument('web_url', type=str, help='URL that displays the resource in the browser. Read-only.')
c.argument('created_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory user '
'object. Expected value: json-string/json-file/@json-file.')
c.argument('last_modified_by_user', type=validate_file_or_dict, help='Represents an Azure Active Directory '
'user object. Expected value: json-string/json-file/@json-file.')
c.argument('drive_id', type=str, help='Unique identifier of the drive instance that contains the item. '
'Read-only.', arg_group='Parent Reference')
c.argument('drive_type', type=str, help='Identifies the type of drive. See [drive][] resource for values.',
arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_id', type=str, help='Unique identifier of the item in the drive. '
'Read-only.', arg_group='Parent Reference')
c.argument('microsoft_graph_item_reference_name', type=str, help='The name of the item being referenced. '
'Read-only.', arg_group='Parent Reference')
c.argument('path', type=str, help='Path that can be used to navigate to the item. Read-only.',
arg_group='Parent Reference')
c.argument('share_id', type=str, help='A unique identifier for a shared resource that can be accessed via the '
'[Shares][] API.', arg_group='Parent Reference')
c.argument('sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds', arg_group='Parent '
'Reference')
c.argument('microsoft_graph_item_reference_site_id', type=str, help='', arg_group='Parent Reference')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_identity_application', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_device', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('microsoft_graph_identity_user', action=AddApplication, nargs='+', help='identity',
arg_group='Created By')
c.argument('content_type_', options_list=['--content-type'], action=AddContentType, nargs='+',
help='contentTypeInfo')
c.argument('microsoft_graph_sharepoint_ids', action=AddSharepointIds, nargs='+', help='sharepointIds')
c.argument('analytics', type=validate_file_or_dict, help='itemAnalytics Expected value: '
'json-string/json-file/@json-file.')
c.argument('drive_item', type=validate_file_or_dict, help='driveItem Expected value: '
'json-string/json-file/@json-file.')
c.argument('versions', action=AddVersions, nargs='+', help='The list of previous versions of the list item.')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Fields')
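# 'sites siteslist update-subscription': webhook subscription properties (change type, notification URL,
# expiration, client state and optional resource-data encryption settings).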
with self.argument_context('sites siteslist update-subscription') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('subscription_id', type=str, help='key: id of subscription', id_part='subscription')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('application_id', type=str, help='Identifier of the application used to create the subscription. '
'Read-only.')
c.argument('change_type', type=str, help='Required. Indicates the type of change in the subscribed resource '
'that will raise a change notification. The supported values are: created, updated, deleted. '
'Multiple values can be combined using a comma-separated list. Note: Drive root item and list change '
'notifications support only the updated changeType. User and group change notifications support '
'updated and deleted changeType.')
c.argument('client_state', type=str, help='Optional. Specifies the value of the clientState property sent by '
'the service in each change notification. The maximum length is 128 characters. The client can '
'check that the change notification came from the service by comparing the value of the clientState '
'property sent with the subscription with the value of the clientState property received with each '
'change notification.')
c.argument('creator_id', type=str, help='Identifier of the user or service principal that created the '
'subscription. If the app used delegated permissions to create the subscription, this field '
'contains the id of the signed-in user the app called on behalf of. If the app used application '
'permissions, this field contains the id of the service principal corresponding to the app. '
'Read-only.')
c.argument('encryption_certificate', type=str, help='A base64-encoded representation of a certificate with a '
'public key used to encrypt resource data in change notifications. Optional. Required when '
'includeResourceData is true.')
c.argument('encryption_certificate_id', type=str, help='A custom app-provided identifier to help identify the '
'certificate needed to decrypt resource data. Optional.')
c.argument('expiration_date_time', help='Required. Specifies the date and time when the webhook subscription '
'expires. The time is in UTC, and can be an amount of time from subscription creation that varies '
'for the resource subscribed to. See the table below for maximum supported subscription length of '
'time.')
c.argument('include_resource_data', arg_type=get_three_state_flag(), help='When set to true, change '
'notifications include resource data (such as content of a chat message). Optional.')
c.argument('latest_supported_tls_version', type=str, help='')
c.argument('lifecycle_notification_url', type=str, help='')
c.argument('notification_url', type=str, help='Required. The URL of the endpoint that will receive the change '
'notifications. This URL must make use of the HTTPS protocol.')
c.argument('resource', type=str, help='Required. Specifies the resource that will be monitored for changes. Do '
'not include the base URL (https://graph.microsoft.com/v1.0/). See the possible resource path '
'values for each supported resource.')
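# 'sites siteslistscontenttype' commands: manage the columnLink collection of a content type on a site list.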
with self.argument_context('sites siteslistscontenttype create-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('name', type=str, help='The name of the column in this content type.')
with self.argument_context('sites siteslistscontenttype delete-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistscontenttype list-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistscontenttype show-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistscontenttype update-column-link') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('content_type_id', type=str, help='key: id of contentType')
c.argument('column_link_id', type=str, help='key: id of columnLink')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('name', type=str, help='The name of the column in this content type.')
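# 'sites siteslistsitem' commands: versions, fields, the backing drive item and analytics of a list item.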
with self.argument_context('sites siteslistsitem create-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('last_modified_date_time', help='Date and time the version was last modified. Read-only.')
c.argument('publication', action=AddPublication, nargs='+', help='publicationFacet')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Fields')
with self.argument_context('sites siteslistsitem delete-drive-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistsitem delete-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistsitem delete-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistsitem delete-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistsitem list-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitem set-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('body', type=validate_file_or_dict, help='New navigation property ref values Expected value: '
'json-string/json-file/@json-file.')
with self.argument_context('sites siteslistsitem show-activity') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('start_date_time', type=str, help='')
c.argument('end_date_time', type=str, help='')
c.argument('interval', type=str, help='')
with self.argument_context('sites siteslistsitem show-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitem show-drive-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitem show-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitem show-ref-analytic') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
with self.argument_context('sites siteslistsitem show-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitem update-drive-item') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('body', type=validate_file_or_dict, help='New navigation property values Expected value: '
'json-string/json-file/@json-file.')
with self.argument_context('sites siteslistsitem update-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
with self.argument_context('sites siteslistsitem update-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
c.argument('last_modified_date_time', help='Date and time the version was last modified. Read-only.')
c.argument('publication', action=AddPublication, nargs='+', help='publicationFacet')
c.argument('application', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('device', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('user', action=AddApplication, nargs='+', help='identity', arg_group='Last Modified By')
c.argument('microsoft_graph_entity_id', type=str, help='Read-only.', arg_group='Fields')
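# 'sites siteslistsitemsversion' commands: field values of a specific listItemVersion, plus restore-version.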
with self.argument_context('sites siteslistsitemsversion delete-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('if_match', type=str, help='ETag')
with self.argument_context('sites siteslistsitemsversion restore-version') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
with self.argument_context('sites siteslistsitemsversion show-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites siteslistsitemsversion update-field') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('list_id', type=str, help='key: id of list')
c.argument('list_item_id', type=str, help='key: id of listItem')
c.argument('list_item_version_id', type=str, help='key: id of listItemVersion')
c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
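# 'sites sitesonenote*' commands: OneNote notebooks, section groups, sections and pages under a site.
# The copy-* actions take optional target identifiers (group id, rename-as, site collection id, destination
# site id); empty help strings are left as-is rather than guessed at.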
with self.argument_context('sites sitesonenotenotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('post_content_schema_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebook get-notebook-from-web-url') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('web_url', type=str, help='')
with self.argument_context('sites sitesonenotenotebook show-recent-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('include_personal_notebooks', arg_type=get_three_state_flag(), help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenoteNotebooksSectiongroupsSectionsPagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionspagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectiongroupssectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenoteNotebooksSectionsPagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotenotebookssectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotenotebookssectionspagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionspagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionspagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionsparentsectiongroupparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionsparentsectiongroupsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotenotebookssectionsparentsectiongroupsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('notebook_id', type=str, help='key: id of notebook')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
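# 'sites sitesonenotepage*' commands: page-level copy-to-section, onenote-patch-content and preview actions,
# addressed through the page's parent notebook / section-group / section paths.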
with self.argument_context('sites sitesonenotepage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenotePagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotepage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotepagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenotePagesParentnotebookSectiongroupsSectionsPagesCommands, nargs='+',
help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotepagesparentnotebooksectiongroupssectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenotePagesParentnotebookSectionsPagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionsparentsectiongroupparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionsparentsectiongroupsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentnotebooksectionsparentsectiongroupsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionpage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionpage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenotePagesParentsectionPagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotepagesparentsectionpage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_page_id1', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebooksectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebooksectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebooksectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectionparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectiongroupparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectiongroupparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectiongroupparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectiongroupsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotepagesparentsectiongroupsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenoteSectiongroupsParentnotebookSectionsPagesCommands, nargs='+',
help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionspagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupsparentnotebooksectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddSitesOnenoteSectiongroupsSectionsPagesCommands, nargs='+', help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotesectiongroupssectionspagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspagesparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspagesparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionspagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionsparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectiongroupssectionsparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspage copy-to-section') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspage onenote-patch-content') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('commands', action=AddCommands, nargs='+', help='')
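# Hedged usage sketch (not generated code): the '--commands' argument above is parsed by the
# AddCommands action into a list of key=value pairs. Based on the Microsoft Graph
# onenotePatchContent action, each entry typically carries fields such as target, action,
# position and content, e.g. (illustrative values and option spellings only):
#   az sites sitesonenotesectionspage onenote-patch-content \
#       --site-id {site-id} --onenote-section-id {section-id} --onenote-page-id {page-id} \
#       --commands target=body action=append position=after content="<p>new text</p>"
# The exact option names are assumptions derived from the argument names registered here.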
with self.argument_context('sites sitesonenotesectionspage preview') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
with self.argument_context('sites sitesonenotesectionspagesparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentnotebooksectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentnotebooksectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentnotebooksectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionspagesparentsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_page_id', type=str, help='key: id of onenotePage')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebooksectiongroupsparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebooksectiongroupssection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebooksectiongroupssection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('section_group_id', type=str, help='key: id of sectionGroup')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentsectiongroupparentnotebook copy-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('notebook_folder', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentsectiongroupparentnotebooksection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentsectiongroupparentnotebooksection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentsectiongroupsection copy-to-notebook') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites sitesonenotesectionsparentsectiongroupsection copy-to-section-group') as c:
c.argument('site_id', type=str, help='key: id of site')
c.argument('onenote_section_id', type=str, help='key: id of onenoteSection')
c.argument('onenote_section_id1', type=str, help='key: id of onenoteSection')
c.argument('id_', options_list=['--id'], type=str, help='')
c.argument('group_id', type=str, help='')
c.argument('rename_as', type=str, help='')
c.argument('site_collection_id', type=str, help='')
c.argument('string_site_id', type=str, help='')
with self.argument_context('sites user create-ref-followed-site') as c:
c.argument('user_id', type=str, help='key: id of user')
c.argument('body', type=validate_file_or_dict, help='New navigation property ref value. Expected value: '
           'json-string/json-file/@json-file.')
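# Hedged usage sketch (not generated code): '--body' accepts inline JSON, a JSON file path,
# or '@<file>' syntax, per the validate_file_or_dict validator used above, e.g.
# (illustrative values only):
#   az sites user create-ref-followed-site --user-id {user-id} \
#       --body '{"@odata.id": "https://graph.microsoft.com/v1.0/sites/{site-id}"}'
# The option spellings and the reference-body shape are assumptions, not part of this file.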
with self.argument_context('sites user list-followed-site') as c:
c.argument('user_id', type=str, help='key: id of user')
c.argument('orderby', nargs='+', help='Order items by property values')
c.argument('select', nargs='+', help='Select properties to be returned')
c.argument('expand', nargs='+', help='Expand related entities')
with self.argument_context('sites user list-ref-followed-site') as c:
c.argument('user_id', type=str, help='key: id of user')
c.argument('orderby', nargs='+', help='Order items by property values')
| 74.363987
| 134
| 0.650217
| 27,885
| 211,863
| 4.80979
| 0.019939
| 0.126893
| 0.111787
| 0.097121
| 0.978206
| 0.977848
| 0.974732
| 0.967678
| 0.967678
| 0.966739
| 0
| 0.000328
| 0.207511
| 211,863
| 2,848
| 135
| 74.390098
| 0.798492
| 0.00253
| 0
| 0.901371
| 0
| 0.008378
| 0.44956
| 0.076463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000381
| false
| 0
| 0.001142
| 0
| 0.001523
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 8a066deefbdb950c69247545d1a2b0440170e4f3
| 33,838
| py
| Python
| sdk/lusid/api/reference_portfolio_api.py
| fossabot/lusid-sdk-python
| 154a0232a00026d79379aec7196555f24d742ade
| ["MIT"] | null | null | null
| sdk/lusid/api/reference_portfolio_api.py
| fossabot/lusid-sdk-python
| 154a0232a00026d79379aec7196555f24d742ade
| ["MIT"] | null | null | null
| sdk/lusid/api/reference_portfolio_api.py
| fossabot/lusid-sdk-python
| 154a0232a00026d79379aec7196555f24d742ade
| ["MIT"] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.2321
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid.api_client import ApiClient
from lusid.exceptions import (
ApiTypeError,
ApiValueError
)
class ReferencePortfolioApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
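    # Hedged usage sketch (not generated code): a typical way to obtain an instance is to
    # build an ApiClient first and pass it in; any authentication/configuration detail is an
    # assumption about the surrounding SDK, not something this class defines.
    #   from lusid.api_client import ApiClient
    #   api = ReferencePortfolioApi(api_client=ApiClient())
    # Omitting api_client falls back to a default ApiClient(), as implemented above.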
def create_reference_portfolio(self, scope, create_reference_portfolio_request, **kwargs): # noqa: E501
"""Create reference portfolio # noqa: E501
Create a new reference portfolio. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_reference_portfolio(scope, create_reference_portfolio_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The intended scope of the portfolio (required)
:param CreateReferencePortfolioRequest create_reference_portfolio_request: The portfolio creation request object (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Portfolio
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_reference_portfolio_with_http_info(scope, create_reference_portfolio_request, **kwargs) # noqa: E501
def create_reference_portfolio_with_http_info(self, scope, create_reference_portfolio_request, **kwargs): # noqa: E501
"""Create reference portfolio # noqa: E501
Create a new reference portfolio. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_reference_portfolio_with_http_info(scope, create_reference_portfolio_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The intended scope of the portfolio (required)
:param CreateReferencePortfolioRequest create_reference_portfolio_request: The portfolio creation request object (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Portfolio, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'create_reference_portfolio_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_reference_portfolio" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'create_reference_portfolio_request' is set
if ('create_reference_portfolio_request' not in local_var_params or
local_var_params['create_reference_portfolio_request'] is None):
raise ApiValueError("Missing the required parameter `create_reference_portfolio_request` when calling `create_reference_portfolio`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_reference_portfolio_request' in local_var_params:
body_params = local_var_params['create_reference_portfolio_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2321'
return self.api_client.call_api(
'/api/referenceportfolios/{scope}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Portfolio', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
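    # Hedged usage sketch (not generated code): both calling styles documented above, using an
    # assumed CreateReferencePortfolioRequest model from the surrounding SDK (illustrative values).
    #   request = CreateReferencePortfolioRequest(display_name='FTSE 100', code='ftse-100')
    #   portfolio = api.create_reference_portfolio('my-scope', request)               # synchronous
    #   thread = api.create_reference_portfolio('my-scope', request, async_req=True)  # asynchronous
    #   portfolio = thread.get()
    # The model name and its fields are assumptions; only the call pattern comes from the docstring.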
def get_reference_portfolio_constituents(self, scope, code, **kwargs): # noqa: E501
"""Get constituents # noqa: E501
Get constituents from the specified reference portfolio at an effective time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_reference_portfolio_constituents(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the reference portfolio. (required)
:param str code: The code of the reference portfolio. Together with the scope this uniquely identifies the reference portfolio. (required)
:param str effective_at: The effective date of the constituents to retrieve. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve constituents. Defaults to return the latest version of each constituent if not specified.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"ReferenceHolding\" domain to decorate onto the constituents. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"ReferenceHolding/strategy/quantsignal\". Defaults to return all available instrument and reference holding properties if not specified.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: GetReferencePortfolioConstituentsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_reference_portfolio_constituents_with_http_info(scope, code, **kwargs) # noqa: E501
def get_reference_portfolio_constituents_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""Get constituents # noqa: E501
Get constituents from the specified reference portfolio at an effective time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_reference_portfolio_constituents_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the reference portfolio. (required)
:param str code: The code of the reference portfolio. Together with the scope this uniquely identifies the reference portfolio. (required)
:param str effective_at: The effective date of the constituents to retrieve. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve constituents. Defaults to return the latest version of each constituent if not specified.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"ReferenceHolding\" domain to decorate onto the constituents. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"ReferenceHolding/strategy/quantsignal\". Defaults to return all available instrument and reference holding properties if not specified.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(GetReferencePortfolioConstituentsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at', 'property_keys'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_reference_portfolio_constituents" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_reference_portfolio_constituents`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_reference_portfolio_constituents`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_reference_portfolio_constituents`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_reference_portfolio_constituents`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_reference_portfolio_constituents`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_reference_portfolio_constituents`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'property_keys' in local_var_params:
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2321'
return self.api_client.call_api(
'/api/referenceportfolios/{scope}/{code}/constituents', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetReferencePortfolioConstituentsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
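    # Hedged usage sketch (not generated code): retrieving constituents with decorated
    # properties, following the parameter descriptions above (values are illustrative).
    #   response = api.get_reference_portfolio_constituents(
    #       'my-scope', 'ftse-100',
    #       effective_at='2020-01-01T00:00:00Z',
    #       property_keys=['Instrument/system/Name'])
    # Note that scope and code must match the pattern ^[a-zA-Z0-9\-_]+$ and be 1-64 characters
    # long, as enforced by the validation in this method.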
def list_constituents_adjustments(self, scope, code, from_effective_at, to_effective_at, **kwargs): # noqa: E501
"""List constituents adjustments # noqa: E501
List the constituent adjustments made to the specified reference portfolio over a specified interval of effective time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_constituents_adjustments(scope, code, from_effective_at, to_effective_at, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio (required)
:param str code: Code for the portfolio (required)
:param str from_effective_at: Events between this time (inclusive) and the toEffectiveAt are returned. (required)
:param str to_effective_at: Events between this time (inclusive) and the fromEffectiveAt are returned. (required)
:param datetime as_at_time: The as-at time for which the result is valid.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ResourceListOfConstituentsAdjustmentHeader
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_constituents_adjustments_with_http_info(scope, code, from_effective_at, to_effective_at, **kwargs) # noqa: E501
def list_constituents_adjustments_with_http_info(self, scope, code, from_effective_at, to_effective_at, **kwargs): # noqa: E501
"""List constituents adjustments # noqa: E501
List the constituent adjustments made to the specified reference portfolio over a specified interval of effective time. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_constituents_adjustments_with_http_info(scope, code, from_effective_at, to_effective_at, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio (required)
:param str code: Code for the portfolio (required)
:param str from_effective_at: Events between this time (inclusive) and the toEffectiveAt are returned. (required)
:param str to_effective_at: Events between this time (inclusive) and the fromEffectiveAt are returned. (required)
:param datetime as_at_time: The as-at time for which the result is valid.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ResourceListOfConstituentsAdjustmentHeader, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'from_effective_at', 'to_effective_at', 'as_at_time'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_constituents_adjustments" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'from_effective_at' is set
if ('from_effective_at' not in local_var_params or
local_var_params['from_effective_at'] is None):
raise ApiValueError("Missing the required parameter `from_effective_at` when calling `list_constituents_adjustments`") # noqa: E501
# verify the required parameter 'to_effective_at' is set
if ('to_effective_at' not in local_var_params or
local_var_params['to_effective_at'] is None):
raise ApiValueError("Missing the required parameter `to_effective_at` when calling `list_constituents_adjustments`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `list_constituents_adjustments`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `list_constituents_adjustments`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `list_constituents_adjustments`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `list_constituents_adjustments`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `list_constituents_adjustments`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `list_constituents_adjustments`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'from_effective_at' in local_var_params:
query_params.append(('fromEffectiveAt', local_var_params['from_effective_at'])) # noqa: E501
if 'to_effective_at' in local_var_params:
query_params.append(('toEffectiveAt', local_var_params['to_effective_at'])) # noqa: E501
if 'as_at_time' in local_var_params:
query_params.append(('asAtTime', local_var_params['as_at_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2321'
return self.api_client.call_api(
'/api/referenceportfolios/{scope}/{code}/constituentsadjustments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceListOfConstituentsAdjustmentHeader', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
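    # Hedged usage sketch (not generated code): listing adjustments over an effective-time
    # window, per the required fromEffectiveAt/toEffectiveAt parameters above (illustrative values).
    #   headers = api.list_constituents_adjustments(
    #       'my-scope', 'ftse-100',
    #       from_effective_at='2020-01-01T00:00:00Z',
    #       to_effective_at='2020-12-31T00:00:00Z')
    # Both bounds are required by the validation in this method; as_at_time is optional.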
def upsert_reference_portfolio_constituents(self, scope, code, upsert_reference_portfolio_constituents_request, **kwargs): # noqa: E501
"""Add constituents # noqa: E501
Add constituents to the specified reference portfolio. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_reference_portfolio_constituents(scope, code, upsert_reference_portfolio_constituents_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio (required)
:param str code: The code of the portfolio (required)
:param UpsertReferencePortfolioConstituentsRequest upsert_reference_portfolio_constituents_request: The constituents to upload to the portfolio (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: UpsertReferencePortfolioConstituentsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.upsert_reference_portfolio_constituents_with_http_info(scope, code, upsert_reference_portfolio_constituents_request, **kwargs) # noqa: E501
def upsert_reference_portfolio_constituents_with_http_info(self, scope, code, upsert_reference_portfolio_constituents_request, **kwargs): # noqa: E501
"""Add constituents # noqa: E501
Add constituents to the specified reference portfolio. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_reference_portfolio_constituents_with_http_info(scope, code, upsert_reference_portfolio_constituents_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio (required)
:param str code: The code of the portfolio (required)
:param UpsertReferencePortfolioConstituentsRequest upsert_reference_portfolio_constituents_request: The constituents to upload to the portfolio (required)
:param _return_http_data_only: return only the response data, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(UpsertReferencePortfolioConstituentsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'upsert_reference_portfolio_constituents_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method upsert_reference_portfolio_constituents" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'upsert_reference_portfolio_constituents_request' is set
if ('upsert_reference_portfolio_constituents_request' not in local_var_params or
local_var_params['upsert_reference_portfolio_constituents_request'] is None):
raise ApiValueError("Missing the required parameter `upsert_reference_portfolio_constituents_request` when calling `upsert_reference_portfolio_constituents`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_reference_portfolio_constituents`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_reference_portfolio_constituents`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_reference_portfolio_constituents`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_reference_portfolio_constituents`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_reference_portfolio_constituents`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_reference_portfolio_constituents`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'upsert_reference_portfolio_constituents_request' in local_var_params:
body_params = local_var_params['upsert_reference_portfolio_constituents_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2321'
return self.api_client.call_api(
'/api/referenceportfolios/{scope}/{code}/constituents', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UpsertReferencePortfolioConstituentsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
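# A minimal usage sketch for the upsert endpoint above, assuming an
# instantiated client `api` and a pre-built request model; the request
# constructor arguments are not shown in this file and are left elided:
#
#     request = UpsertReferencePortfolioConstituentsRequest(...)
#     response = api.upsert_reference_portfolio_constituents(
#         scope='Finbourne-Examples',
#         code='global-equity',
#         upsert_reference_portfolio_constituents_request=request)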
| 57.352542 | 398 | 0.653319 | 3,967 | 33,838 | 5.335518 | 0.066297 | 0.043088 | 0.070112 | 0.027969 | 0.938156 | 0.928517 | 0.911367 | 0.898658 | 0.887556 | 0.863744 | 0 | 0.01603 | 0.266239 | 33,838 | 589 | 399 | 57.449915 | 0.836441 | 0.417814 | 0 | 0.680556 | 1 | 0.0625 | 0.322818 | 0.121492 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0 | 0.017361 | 0 | 0.079861 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8a1c9238f8102a05f16e9409558e2fceb2f7fb09 | 8,773 | py | Python | tests/test_success_range_above.py | Bernardo-MG/wargame_analysis_jupyter_notebook | db13838ce0f8c6dcbc160259c1ee0ae258b51ba7 | ["MIT"] | null | null | null | tests/test_success_range_above.py | Bernardo-MG/wargame_analysis_jupyter_notebook | db13838ce0f8c6dcbc160259c1ee0ae258b51ba7 | ["MIT"] | null | null | null | tests/test_success_range_above.py | Bernardo-MG/wargame_analysis_jupyter_notebook | db13838ce0f8c6dcbc160259c1ee0ae258b51ba7 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from scripts.probability import roll_success_range
"""
Success range (above) tests.
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
class TestZeroToTenAboveNotEqual(unittest.TestCase):
"""
Tests the chance to go above with the range [0,10].
"""
def test_goal_0(self):
chance = roll_success_range(0, 10, 0, above=True, equal=False)
self.assertEqual({"min": 1, "max": 10}, chance)
def test_goal_1(self):
chance = roll_success_range(0, 10, 1, above=True, equal=False)
self.assertEqual({"min": 2, "max": 10}, chance)
def test_goal_2(self):
chance = roll_success_range(0, 10, 2, above=True, equal=False)
self.assertEqual({"min": 3, "max": 10}, chance)
def test_goal_3(self):
chance = roll_success_range(0, 10, 3, above=True, equal=False)
self.assertEqual({"min": 4, "max": 10}, chance)
def test_goal_4(self):
chance = roll_success_range(0, 10, 4, above=True, equal=False)
self.assertEqual({"min": 5, "max": 10}, chance)
def test_goal_5(self):
chance = roll_success_range(0, 10, 5, above=True, equal=False)
self.assertEqual({"min": 6, "max": 10}, chance)
def test_goal_6(self):
chance = roll_success_range(0, 10, 6, above=True, equal=False)
self.assertEqual({"min": 7, "max": 10}, chance)
def test_goal_7(self):
chance = roll_success_range(0, 10, 7, above=True, equal=False)
self.assertEqual({"min": 8, "max": 10}, chance)
def test_goal_8(self):
chance = roll_success_range(0, 10, 8, above=True, equal=False)
self.assertEqual({"min": 9, "max": 10}, chance)
def test_goal_9(self):
chance = roll_success_range(0, 10, 9, above=True, equal=False)
self.assertEqual({"min": 10, "max": 10}, chance)
def test_goal_10(self):
chance = roll_success_range(0, 10, 10, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_above_max(self):
chance = roll_success_range(0, 10, 20, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_below_min(self):
chance = roll_success_range(0, 10, -1, above=True, equal=False)
self.assertEqual({"min": 0, "max": 10}, chance)
class TestOneToTenAboveNotEqual(unittest.TestCase):
"""
Tests the chance to go above with the range [1,10].
"""
def test_goal_1(self):
chance = roll_success_range(1, 10, 1, above=True, equal=False)
self.assertEqual({"min": 2, "max": 10}, chance)
def test_goal_2(self):
chance = roll_success_range(1, 10, 2, above=True, equal=False)
self.assertEqual({"min": 3, "max": 10}, chance)
def test_goal_3(self):
chance = roll_success_range(1, 10, 3, above=True, equal=False)
self.assertEqual({"min": 4, "max": 10}, chance)
def test_goal_4(self):
chance = roll_success_range(1, 10, 4, above=True, equal=False)
self.assertEqual({"min": 5, "max": 10}, chance)
def test_goal_5(self):
chance = roll_success_range(1, 10, 5, above=True, equal=False)
self.assertEqual({"min": 6, "max": 10}, chance)
def test_goal_6(self):
chance = roll_success_range(1, 10, 6, above=True, equal=False)
self.assertEqual({"min": 7, "max": 10}, chance)
def test_goal_7(self):
chance = roll_success_range(1, 10, 7, above=True, equal=False)
self.assertEqual({"min": 8, "max": 10}, chance)
def test_goal_8(self):
chance = roll_success_range(1, 10, 8, above=True, equal=False)
self.assertEqual({"min": 9, "max": 10}, chance)
def test_goal_9(self):
chance = roll_success_range(1, 10, 9, above=True, equal=False)
self.assertEqual({"min": 10, "max": 10}, chance)
def test_goal_10(self):
chance = roll_success_range(1, 10, 10, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_above_max(self):
chance = roll_success_range(1, 10, 20, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_below_min(self):
chance = roll_success_range(1, 10, 0, above=True, equal=False)
self.assertEqual({"min": 1, "max": 10}, chance)
class TestTenToOneHundredAboveNotEqual(unittest.TestCase):
"""
Tests the chance to go above with the range [10,100].
"""
def test_goal_0(self):
chance = roll_success_range(10, 100, 0, above=True, equal=False)
self.assertEqual({"min": 10, "max": 100}, chance)
def test_goal_at_max(self):
chance = roll_success_range(10, 100, 100, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_at_min(self):
chance = roll_success_range(10, 100, 10, above=True, equal=False)
self.assertEqual({"min": 11, "max": 100}, chance)
def test_goal_at_middle(self):
chance = roll_success_range(10, 100, 50, above=True, equal=False)
self.assertEqual({"min": 51, "max": 100}, chance)
def test_goal_close_to_max(self):
chance = roll_success_range(10, 100, 80, above=True, equal=False)
self.assertEqual({"min": 81, "max": 100}, chance)
def test_goal_above_max(self):
chance = roll_success_range(10, 100, 200, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_just_below_middle(self):
chance = roll_success_range(10, 100, 40, above=True, equal=False)
self.assertEqual({"min": 41, "max": 100}, chance)
def test_goal_just_below_max(self):
chance = roll_success_range(10, 100, 90, above=True, equal=False)
self.assertEqual({"min": 91, "max": 100}, chance)
def test_goal_below_min(self):
chance = roll_success_range(10, 100, 5, above=True, equal=False)
self.assertEqual({"min": 10, "max": 100}, chance)
def test_goal_just_below_min(self):
chance = roll_success_range(10, 100, 9, above=True, equal=False)
self.assertEqual({"min": 10, "max": 100}, chance)
class Test1d6AboveNotEqual(unittest.TestCase):
"""
Tests the chance to go above with the range [1,6], the range of a six-sided die.
"""
def test_no_goal(self):
chance = roll_success_range(1, 6, 0, above=True, equal=False)
self.assertEqual({"min": 1, "max": 6}, chance)
def test_goal_above_max(self):
chance = roll_success_range(1, 6, 10, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_1(self):
chance = roll_success_range(1, 6, 1, above=True, equal=False)
self.assertEqual({"min": 2, "max": 6}, chance)
def test_goal_2(self):
chance = roll_success_range(1, 6, 2, above=True, equal=False)
self.assertEqual({"min": 3, "max": 6}, chance)
def test_goal_3(self):
chance = roll_success_range(1, 6, 3, above=True, equal=False)
self.assertEqual({"min": 4, "max": 6}, chance)
def test_goal_4(self):
chance = roll_success_range(1, 6, 4, above=True, equal=False)
self.assertEqual({"min": 5, "max": 6}, chance)
def test_goal_5(self):
chance = roll_success_range(1, 6, 5, above=True, equal=False)
self.assertEqual({"min": 6, "max": 6}, chance)
def test_goal_6(self):
chance = roll_success_range(1, 6, 6, above=True, equal=False)
self.assertEqual(None, chance)
class Test1d6Norm0AboveNotEqual(unittest.TestCase):
"""
Tests the chance to go above with a six-sided die normalized to start at zero, i.e. the range [0,5].
"""
def test_goal_above_max(self):
chance = roll_success_range(1, 6, 10, above=True, equal=False)
self.assertEqual(None, chance)
def test_goal_0(self):
chance = roll_success_range(0, 5, 0, above=True, equal=False)
self.assertEqual({"min": 1, "max": 5}, chance)
def test_goal_1(self):
chance = roll_success_range(0, 5, 1, above=True, equal=False)
self.assertEqual({"min": 2, "max": 5}, chance)
def test_goal_2(self):
chance = roll_success_range(0, 5, 2, above=True, equal=False)
self.assertEqual({"min": 3, "max": 5}, chance)
def test_goal_3(self):
chance = roll_success_range(0, 5, 3, above=True, equal=False)
self.assertEqual({"min": 4, "max": 5}, chance)
def test_goal_4(self):
chance = roll_success_range(0, 5, 4, above=True, equal=False)
self.assertEqual({"min": 5, "max": 5}, chance)
def test_goal_5(self):
chance = roll_success_range(0, 5, 5, above=True, equal=False)
self.assertEqual(None, chance)
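# A minimal sketch of what scripts.probability.roll_success_range appears to
# compute, inferred from the assertions above; the real implementation may
# differ, and the `above=False` branch is a symmetric assumption not
# exercised by this test file.
def _roll_success_range_sketch(lower, upper, goal, above=True, equal=False):
    """Return the sub-range of [lower, upper] that counts as a success, or None."""
    if above:
        # Smallest winning roll, clamped into the die's range.
        threshold = max(lower, goal if equal else goal + 1)
        if threshold > upper:
            return None  # no roll can beat the goal
        return {"min": threshold, "max": upper}
    # Assumed mirror case: succeed by rolling below (or at) the goal.
    threshold = min(upper, goal if equal else goal - 1)
    if threshold < lower:
        return None  # no roll can come in under the goal
    return {"min": lower, "max": threshold}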
| 25.502907 | 93 | 0.627607 | 1,258 | 8,773 | 4.194754 | 0.063593 | 0.10631 | 0.154633 | 0.198977 | 0.9314 | 0.931021 | 0.920599 | 0.869054 | 0.804434 | 0.788895 | 0 | 0.059746 | 0.229226 | 8,773 | 343 | 94 | 25.577259 | 0.720645 | 0.041035 | 0 | 0.48125 | 0 | 0 | 0.032266 | 0 | 0 | 0 | 0 | 0 | 0.3125 | 1 | 0.3125 | false | 0 | 0.01875 | 0 | 0.3625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8a3da151e11ff976d5e21ea35ff74cc6cbcf3be3 | 177 | py | Python | functions/registration/__init__.py | hsokooti/RegNet | 28a8b6132677bb58e9fc811c0dd15d78913c7e86 | ["Apache-2.0"] | 187 | 2018-01-08T11:37:12.000Z | 2022-03-03T06:26:43.000Z | functions/registration/__init__.py | hsokooti/RegNet | 28a8b6132677bb58e9fc811c0dd15d78913c7e86 | ["Apache-2.0"] | 15 | 2018-01-27T07:12:41.000Z | 2021-08-22T12:37:30.000Z | functions/registration/__init__.py | hsokooti/RegNet | 28a8b6132677bb58e9fc811c0dd15d78913c7e86 | ["Apache-2.0"] | 60 | 2018-01-09T13:00:32.000Z | 2021-06-15T03:02:36.000Z |
from .multi_stage import multi_stage
from .multi_stage import calculate_jacobian
from .multi_stage_error import multi_stage_error
__version__ = "0.3.0"
__author__ = "hsokooti"
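# With these re-exports, callers can import from the package root rather than
# its submodules; the module path relative to the project root is an
# assumption here:
#
#     from functions.registration import multi_stage, calculate_jacobian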
| 25.285714 | 48 | 0.830508 | 26 | 177 | 5.038462 | 0.461538 | 0.381679 | 0.320611 | 0.305344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018987 | 0.107345 | 177 | 6 | 49 | 29.5 | 0.810127 | 0 | 0 | 0 | 0 | 0 | 0.073446 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
8a748b0f539d083ccd52908bf91537ba4d02d653 | 44,215 | py | Python | Data/Packages/backrefs/st3/backrefs/uniprops/unidata/age.py | koery/win-sublime | 1b16cbe9858eced52567971286109250df787d36 | ["MIT"] | 182 | 2017-03-05T07:43:13.000Z | 2022-03-15T13:09:07.000Z | Data/Packages/backrefs/st3/backrefs/uniprops/unidata/age.py | koery/win-sublime | 1b16cbe9858eced52567971286109250df787d36 | ["MIT"] | null | null | null | Data/Packages/backrefs/st3/backrefs/uniprops/unidata/age.py | koery/win-sublime | 1b16cbe9858eced52567971286109250df787d36 | ["MIT"] | 16 | 2017-03-07T11:01:27.000Z | 2022-01-08T09:21:01.000Z |
"""Unicode Properties (autogen)."""
from __future__ import unicode_literals
unicode_age = {
"1.1": "\u0000-\u01f5\u01fa-\u0217\u0250-\u02a8\u02b0-\u02de\u02e0-\u02e9\u0300-\u0345\u0360-\u0361\u0374-\u0375\u037a\u037e\u0384-\u038a\u038c\u038e-\u03a1\u03a3-\u03ce\u03d0-\u03d6\u03da\u03dc\u03de\u03e0\u03e2-\u03f3\u0401-\u040c\u040e-\u044f\u0451-\u045c\u045e-\u0486\u0490-\u04c4\u04c7-\u04c8\u04cb-\u04cc\u04d0-\u04eb\u04ee-\u04f5\u04f8-\u04f9\u0531-\u0556\u0559-\u055f\u0561-\u0587\u0589\u05b0-\u05b9\u05bb-\u05c3\u05d0-\u05ea\u05f0-\u05f4\u060c\u061b\u061f\u0621-\u063a\u0640-\u0652\u0660-\u066d\u0670-\u06b7\u06ba-\u06be\u06c0-\u06ce\u06d0-\u06ed\u06f0-\u06f9\u0901-\u0903\u0905-\u0939\u093c-\u094d\u0950-\u0954\u0958-\u0970\u0981-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc\u09be-\u09c4\u09c7-\u09c8\u09cb-\u09cd\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09fa\u0a02\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a59-\u0a5c\u0a5e\u0a66-\u0a74\u0a81-\u0a83\u0a85-\u0a8b\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0\u0ae6-\u0aef\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b36-\u0b39\u0b3c-\u0b43\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b61\u0b66-\u0b70\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb5\u0bb7-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be7-\u0bf2\u0c01-\u0c03\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c60-\u0c61\u0c66-\u0c6f\u0c82-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce1\u0ce6-\u0cef\u0d02-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d28\u0d2a-\u0d39\u0d3e-\u0d43\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d60-\u0d61\u0d66-\u0d6f\u0e01-\u0e3a\u0e3f-\u0e5b\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edd\u10a0-\u10c5\u10d0-\u10f6\u10fb\u1100-\u1159\u115f-\u11a2\u11a8-\u11f9\u1e00-\u1e9a\u1ea0-\u1ef9\u1f00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fc4\u1fc6-\u1fd3\u1fd6-\u1fdb\u1fdd-\u1fef\u1ff2-\u1ff4\u1ff6-\u1ffe\u2000-\u202e\u2030-\u2046\u206a-\u2070\u2074-\u208e\u20a0-\u20aa\u20d0-\u20e1\u2100-\u2138\u2153-\u2182\u2190-\u21ea\u2200-\u22f1\u2300\u2302-\u237a\u2400-\u2424\u2440-\u244a\u2460-\u24ea\u2500-\u2595\u25a0-\u25ef\u2600-\u2613\u261a-\u266f\u2701-\u2704\u2706-\u2709\u270c-\u2727\u2729-\u274b\u274d\u274f-\u2752\u2756\u2758-\u275e\u2761-\u2767\u2776-\u2794\u2798-\u27af\u27b1-\u27be\u3000-\u3037\u303f\u3041-\u3094\u3099-\u309e\u30a1-\u30fe\u3105-\u312c\u3131-\u318e\u3190-\u319f\u3200-\u321c\u3220-\u3243\u3260-\u327b\u327f-\u32b0\u32c0-\u32cb\u32d0-\u32fe\u3300-\u3376\u337b-\u33dd\u33e0-\u33fe\u4e00-\u9fa5\ue000-\ufa2d\ufb00-\ufb06\ufb13-\ufb17\ufb1e-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3f\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe20-\ufe23\ufe30-\ufe44\ufe49-\ufe52\ufe54-\ufe66\ufe68-\ufe6b\ufe70-\ufe72\ufe74\ufe76-\ufefc\ufeff\uff01-\uff5e\uff61-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\uffe0-\uffe6\uffe8-\uffee\ufffd-\uffff",
"2.0": "\u0591-\u05a1\u05a3-\u05af\u05c4\u0f00-\u0f47\u0f49-\u0f69\u0f71-\u0f8b\u0f90-\u0f95\u0f97\u0f99-\u0fad\u0fb1-\u0fb7\u0fb9\u1e9b\u20ab\uac00-\ud7a3\ud800-\udfff\U0001fffe-\U0001ffff\U0002fffe-\U0002ffff\U0003fffe-\U0003ffff\U0004fffe-\U0004ffff\U0005fffe-\U0005ffff\U0006fffe-\U0006ffff\U0007fffe-\U0007ffff\U0008fffe-\U0008ffff\U0009fffe-\U0009ffff\U000afffe-\U000affff\U000bfffe-\U000bffff\U000cfffe-\U000cffff\U000dfffe-\U000dffff\U000efffe-\U0010ffff",
"2.1": "\u20ac\ufffc",
"3.0": "\u01f6-\u01f9\u0218-\u021f\u0222-\u0233\u02a9-\u02ad\u02df\u02ea-\u02ee\u0346-\u034e\u0362\u03d7\u03db\u03dd\u03df\u03e1\u0400\u040d\u0450\u045d\u0488-\u0489\u048c-\u048f\u04ec-\u04ed\u058a\u0653-\u0655\u06b8-\u06b9\u06bf\u06cf\u06fa-\u06fe\u0700-\u070d\u070f-\u072c\u0730-\u074a\u0780-\u07b0\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0df2-\u0df4\u0f6a\u0f96\u0fae-\u0fb0\u0fb8\u0fba-\u0fbc\u0fbe-\u0fcc\u0fcf\u1000-\u1021\u1023-\u1027\u1029-\u102a\u102c-\u1032\u1036-\u1039\u1040-\u1059\u1200-\u1206\u1208-\u1246\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1286\u1288\u128a-\u128d\u1290-\u12ae\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12ce\u12d0-\u12d6\u12d8-\u12ee\u12f0-\u130e\u1310\u1312-\u1315\u1318-\u131e\u1320-\u1346\u1348-\u135a\u1361-\u137c\u13a0-\u13f4\u1401-\u1676\u1680-\u169c\u16a0-\u16f0\u1780-\u17dc\u17e0-\u17e9\u1800-\u180e\u1810-\u1819\u1820-\u1877\u1880-\u18a9\u202f\u2048-\u204d\u20ad-\u20af\u20e2-\u20e3\u2139-\u213a\u2183\u21eb-\u21f3\u2301\u237b\u237d-\u239a\u2425-\u2426\u25f0-\u25f7\u2619\u2670-\u2671\u2800-\u28ff\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3038-\u303a\u303e\u31a0-\u31b7\u3400-\u4db5\ua000-\ua48c\ua490-\ua4a1\ua4a4-\ua4b3\ua4b5-\ua4c0\ua4c2-\ua4c4\ua4c6\ufb1d\ufff9-\ufffb",
"3.1": "\u03f4-\u03f5\ufdd0-\ufdef\U00010300-\U0001031e\U00010320-\U00010323\U00010330-\U0001034a\U00010400-\U00010425\U00010428-\U0001044d\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d12a-\U0001d1dd\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c0\U0001d4c2-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a3\U0001d6a8-\U0001d7c9\U0001d7ce-\U0001d7ff\U00020000-\U0002a6d6\U0002f800-\U0002fa1d\U000e0001\U000e0020-\U000e007f",
"3.2": "\u0220\u034f\u0363-\u036f\u03d8-\u03d9\u03f6\u048a-\u048b\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04ce\u0500-\u050f\u066e-\u066f\u07b1\u10f7-\u10f8\u1700-\u170c\u170e-\u1714\u1720-\u1736\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u2047\u204e-\u2052\u2057\u205f-\u2063\u2071\u20b0-\u20b1\u20e4-\u20ea\u213d-\u214b\u21f4-\u21ff\u22f2-\u22ff\u237c\u239b-\u23ce\u24eb-\u24fe\u2596-\u259f\u25f8-\u25ff\u2616-\u2617\u2672-\u267d\u2680-\u2689\u2768-\u2775\u27d0-\u27eb\u27f0-\u27ff\u2900-\u2aff\u303b-\u303d\u3095-\u3096\u309f-\u30a0\u30ff\u31f0-\u31ff\u3251-\u325f\u32b1-\u32bf\ua4a2-\ua4a3\ua4b4\ua4c1\ua4c5\ufa30-\ufa6a\ufdfc\ufe00-\ufe0f\ufe45-\ufe46\ufe73\uff5f-\uff60",
"4.0": "\u0221\u0234-\u0236\u02ae-\u02af\u02ef-\u02ff\u0350-\u0357\u035d-\u035f\u03f7-\u03fb\u0600-\u0603\u060d-\u0615\u0656-\u0658\u06ee-\u06ef\u06ff\u072d-\u072f\u074d-\u074f\u0904\u09bd\u0a01\u0a03\u0a8c\u0ae1-\u0ae3\u0af1\u0b35\u0b71\u0bf3-\u0bfa\u0cbc-\u0cbd\u17dd\u17f0-\u17f9\u1900-\u191c\u1920-\u192b\u1930-\u193b\u1940\u1944-\u196d\u1970-\u1974\u19e0-\u19ff\u1d00-\u1d6b\u2053-\u2054\u213b\u23cf-\u23d0\u24ff\u2614-\u2615\u268a-\u2691\u26a0-\u26a1\u2b00-\u2b0d\u321d-\u321e\u3250\u327c-\u327d\u32cc-\u32cf\u3377-\u337a\u33de-\u33df\u33ff\u4dc0-\u4dff\ufdfd\ufe47-\ufe48\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010100-\U00010102\U00010107-\U00010133\U00010137-\U0001013f\U00010380-\U0001039d\U0001039f\U00010426-\U00010427\U0001044e-\U0001049d\U000104a0-\U000104a9\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f\U0001d300-\U0001d356\U0001d4c1\U000e0100-\U000e01ef",
"4.1": "\u0237-\u0241\u0358-\u035c\u03fc-\u03ff\u04f6-\u04f7\u05a2\u05c5-\u05c7\u060b\u061e\u0659-\u065e\u0750-\u076d\u097d\u09ce\u0bb6\u0be6\u0fd0-\u0fd1\u10f9-\u10fa\u10fc\u1207\u1247\u1287\u12af\u12cf\u12ef\u130f\u131f\u1347\u135f-\u1360\u1380-\u1399\u1980-\u19a9\u19b0-\u19c9\u19d0-\u19d9\u19de-\u19df\u1a00-\u1a1b\u1a1e-\u1a1f\u1d6c-\u1dc3\u2055-\u2056\u2058-\u205e\u2090-\u2094\u20b2-\u20b5\u20eb\u213c\u214c\u23d1-\u23db\u2618\u267e-\u267f\u2692-\u269c\u26a2-\u26b1\u27c0-\u27c6\u2b0e-\u2b13\u2c00-\u2c2e\u2c30-\u2c5e\u2c80-\u2cea\u2cf9-\u2d25\u2d30-\u2d65\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e00-\u2e17\u2e1c-\u2e1d\u31c0-\u31cf\u327e\u9fa6-\u9fbb\ua700-\ua716\ua800-\ua82b\ufa70-\ufad9\ufe10-\ufe19\U00010140-\U0001018a\U000103a0-\U000103c3\U000103c8-\U000103d5\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a38-\U00010a3a\U00010a3f-\U00010a47\U00010a50-\U00010a58\U0001d200-\U0001d245\U0001d6a4-\U0001d6a5",
"5.0": "\u0242-\u024f\u037b-\u037d\u04cf\u04fa-\u04ff\u0510-\u0513\u05ba\u07c0-\u07fa\u097b-\u097c\u097e-\u097f\u0ce2-\u0ce3\u0cf1-\u0cf2\u1b00-\u1b4b\u1b50-\u1b7c\u1dc4-\u1dca\u1dfe-\u1dff\u20ec-\u20ef\u214d-\u214e\u2184\u23dc-\u23e7\u26b2\u27c7-\u27ca\u2b14-\u2b1a\u2b20-\u2b23\u2c60-\u2c6c\u2c74-\u2c77\ua717-\ua71a\ua720-\ua721\ua840-\ua877\U00010900-\U00010919\U0001091f\U00012000-\U0001236e\U00012400-\U00012462\U00012470-\U00012473\U0001d360-\U0001d371\U0001d7ca-\U0001d7cb",
"5.1": "\u0370-\u0373\u0376-\u0377\u03cf\u0487\u0514-\u0523\u0606-\u060a\u0616-\u061a\u063b-\u063f\u076e-\u077f\u0971-\u0972\u0a51\u0a75\u0b44\u0b62-\u0b63\u0bd0\u0c3d\u0c58-\u0c59\u0c62-\u0c63\u0c78-\u0c7f\u0d3d\u0d44\u0d62-\u0d63\u0d70-\u0d75\u0d79-\u0d7f\u0f6b-\u0f6c\u0fce\u0fd2-\u0fd4\u1022\u1028\u102b\u1033-\u1035\u103a-\u103f\u105a-\u1099\u109e-\u109f\u18aa\u1b80-\u1baa\u1bae-\u1bb9\u1c00-\u1c37\u1c3b-\u1c49\u1c4d-\u1c7f\u1dcb-\u1de6\u1e9c-\u1e9f\u1efa-\u1eff\u2064\u20f0\u214f\u2185-\u2188\u269d\u26b3-\u26bc\u26c0-\u26c3\u27cc\u27ec-\u27ef\u2b1b-\u2b1f\u2b24-\u2b4c\u2b50-\u2b54\u2c6d-\u2c6f\u2c71-\u2c73\u2c78-\u2c7d\u2de0-\u2dff\u2e18-\u2e1b\u2e1e-\u2e30\u312d\u31d0-\u31e3\u9fbc-\u9fc3\ua500-\ua62b\ua640-\ua65f\ua662-\ua673\ua67c-\ua697\ua71b-\ua71f\ua722-\ua78c\ua7fb-\ua7ff\ua880-\ua8c4\ua8ce-\ua8d9\ua900-\ua953\ua95f\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa5c-\uaa5f\ufe24-\ufe26\U00010190-\U0001019b\U000101d0-\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U00010920-\U00010939\U0001093f\U0001d129\U0001f000-\U0001f02b\U0001f030-\U0001f093",
"5.2": "\u0524-\u0525\u0800-\u082d\u0830-\u083e\u0900\u094e\u0955\u0979-\u097a\u09fb\u0fd5-\u0fd8\u109a-\u109d\u115a-\u115e\u11a3-\u11a7\u11fa-\u11ff\u1400\u1677-\u167f\u18b0-\u18f5\u19aa-\u19ab\u19da\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa0-\u1aad\u1cd0-\u1cf2\u1dfd\u20b6-\u20b8\u2150-\u2152\u2189\u23e8\u269e-\u269f\u26bd-\u26bf\u26c4-\u26cd\u26cf-\u26e1\u26e3\u26e8-\u26ff\u2757\u2b55-\u2b59\u2c70\u2c7e-\u2c7f\u2ceb-\u2cf1\u2e31\u3244-\u324f\u9fc4-\u9fcb\ua4d0-\ua4ff\ua6a0-\ua6f7\ua830-\ua839\ua8e0-\ua8fb\ua960-\ua97c\ua980-\ua9cd\ua9cf-\ua9d9\ua9de-\ua9df\uaa60-\uaa7b\uaa80-\uaac2\uaadb-\uaadf\uabc0-\uabed\uabf0-\uabf9\ud7b0-\ud7c6\ud7cb-\ud7fb\ufa6b-\ufa6d\U00010840-\U00010855\U00010857-\U0001085f\U0001091a-\U0001091b\U00010a60-\U00010a7f\U00010b00-\U00010b35\U00010b39-\U00010b55\U00010b58-\U00010b72\U00010b78-\U00010b7f\U00010c00-\U00010c48\U00010e60-\U00010e7e\U00011080-\U000110c1\U00013000-\U0001342e\U0001f100-\U0001f10a\U0001f110-\U0001f12e\U0001f131\U0001f13d\U0001f13f\U0001f142\U0001f146\U0001f14a-\U0001f14e\U0001f157\U0001f15f\U0001f179\U0001f17b-\U0001f17c\U0001f17f\U0001f18a-\U0001f18d\U0001f190\U0001f200\U0001f210-\U0001f231\U0001f240-\U0001f248\U0002a700-\U0002b734",
"6.0": "\u0526-\u0527\u0620\u065f\u0840-\u085b\u085e\u093a-\u093b\u094f\u0956-\u0957\u0973-\u0977\u0b72-\u0b77\u0d29\u0d3a\u0d4e\u0f8c-\u0f8f\u0fd9-\u0fda\u135d-\u135e\u1bc0-\u1bf3\u1bfc-\u1bff\u1dfc\u2095-\u209c\u20b9\u23e9-\u23f3\u26ce\u26e2\u26e4-\u26e7\u2705\u270a-\u270b\u2728\u274c\u274e\u2753-\u2755\u275f-\u2760\u2795-\u2797\u27b0\u27bf\u27ce-\u27cf\u2d70\u2d7f\u31b8-\u31ba\ua660-\ua661\ua78d-\ua78e\ua790-\ua791\ua7a0-\ua7a9\ua7fa\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\ufbb2-\ufbc1\U00011000-\U0001104d\U00011052-\U0001106f\U00016800-\U00016a38\U0001b000-\U0001b001\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0be\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0df\U0001f130\U0001f132-\U0001f13c\U0001f13e\U0001f140-\U0001f141\U0001f143-\U0001f145\U0001f147-\U0001f149\U0001f14f-\U0001f156\U0001f158-\U0001f15e\U0001f160-\U0001f169\U0001f170-\U0001f178\U0001f17a\U0001f17d-\U0001f17e\U0001f180-\U0001f189\U0001f18e-\U0001f18f\U0001f191-\U0001f19a\U0001f1e6-\U0001f1ff\U0001f201-\U0001f202\U0001f232-\U0001f23a\U0001f250-\U0001f251\U0001f300-\U0001f320\U0001f330-\U0001f335\U0001f337-\U0001f37c\U0001f380-\U0001f393\U0001f3a0-\U0001f3c4\U0001f3c6-\U0001f3ca\U0001f3e0-\U0001f3f0\U0001f400-\U0001f43e\U0001f440\U0001f442-\U0001f4f7\U0001f4f9-\U0001f4fc\U0001f500-\U0001f53d\U0001f550-\U0001f567\U0001f5fb-\U0001f5ff\U0001f601-\U0001f610\U0001f612-\U0001f614\U0001f616\U0001f618\U0001f61a\U0001f61c-\U0001f61e\U0001f620-\U0001f625\U0001f628-\U0001f62b\U0001f62d\U0001f630-\U0001f633\U0001f635-\U0001f640\U0001f645-\U0001f64f\U0001f680-\U0001f6c5\U0001f700-\U0001f773\U0002b740-\U0002b81d",
"6.1": "\u058f\u0604\u08a0\u08a2-\u08ac\u08e4-\u08fe\u0af0\u0ede-\u0edf\u10c7\u10cd\u10fd-\u10ff\u1bab-\u1bad\u1bba-\u1bbf\u1cc0-\u1cc7\u1cf3-\u1cf6\u27cb\u27cd\u2cf2-\u2cf3\u2d27\u2d2d\u2d66-\u2d67\u2e32-\u2e3b\u9fcc\ua674-\ua67b\ua69f\ua792-\ua793\ua7aa\ua7f8-\ua7f9\uaae0-\uaaf6\ufa2e-\ufa2f\U00010980-\U000109b7\U000109be-\U000109bf\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U00011143\U00011180-\U000111c8\U000111d0-\U000111d9\U00011680-\U000116b7\U000116c0-\U000116c9\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U0001eef0-\U0001eef1\U0001f16a-\U0001f16b\U0001f540-\U0001f543\U0001f600\U0001f611\U0001f615\U0001f617\U0001f619\U0001f61b\U0001f61f\U0001f626-\U0001f627\U0001f62c\U0001f62e-\U0001f62f\U0001f634",
"^1.1": "\u01f6-\u01f9\u0218-\u024f\u02a9-\u02af\u02df\u02ea-\u02ff\u0346-\u035f\u0362-\u0373\u0376-\u0379\u037b-\u037d\u037f-\u0383\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u0560\u0588\u058a-\u05af\u05ba\u05c4-\u05cf\u05eb-\u05ef\u05f5-\u060b\u060d-\u061a\u061c-\u061e\u0620\u063b-\u063f\u0653-\u065f\u066e-\u066f\u06b8-\u06b9\u06bf\u06cf\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u094f\u0955-\u0957\u0971-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09fb-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b71-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf3-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ede-\u109f\u10c6-\u10cf\u10f7-\u10fa\u10fc-\u10ff\u115a-\u115e\u11a3-\u11a7\u11fa-\u1dff\u1e9b-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u202f\u2047-\u2069\u2071-\u2073\u208f-\u209f\u20ab-\u20cf\u20e2-\u20ff\u2139-\u2152\u2183-\u218f\u21eb-\u21ff\u22f2-\u22ff\u2301\u237b-\u23ff\u2425-\u243f\u244b-\u245f\u24eb-\u24ff\u2596-\u259f\u25f0-\u25ff\u2614-\u2619\u2670-\u2700\u2705\u270a-\u270b\u2728\u274c\u274e\u2753-\u2755\u2757\u275f-\u2760\u2768-\u2775\u2795-\u2797\u27b0\u27bf-\u2fff\u3038-\u303e\u3040\u3095-\u3098\u309f-\u30a0\u30ff-\u3104\u312d-\u3130\u318f\u31a0-\u31ff\u321d-\u321f\u3244-\u325f\u327c-\u327e\u32b1-\u32bf\u32cc-\u32cf\u32ff\u3377-\u337a\u33de-\u33df\u33ff-\u4dff\u9fa6-\udfff\ufa2e-\ufaff\ufb07-\ufb12\ufb18-\ufb1d\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbb2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfc-\ufe1f\ufe24-\ufe2f\ufe45-\ufe48\ufe53\ufe67\ufe6c-\ufe6f\ufe73\ufe75\ufefd-\ufefe\uff00\uff5f-\uff60\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufffc\U00010000-\U0010ffff",
"^2.0": "\u0000-\u0590\u05a2\u05b0-\u05c3\u05c5-\u0eff\u0f48\u0f6a-\u0f70\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u1e9a\u1e9c-\u20aa\u20ac-\uabff\ud7a4-\ud7ff\ue000-\U0001fffd\U00020000-\U0002fffd\U00030000-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e0000-\U000efffd",
"^2.1": "\u0000-\u20ab\u20ad-\ufffb\ufffd-\U0010ffff",
"^3.0": "\u0000-\u01f5\u01fa-\u0217\u0220-\u0221\u0234-\u02a8\u02ae-\u02de\u02e0-\u02e9\u02ef-\u0345\u034f-\u0361\u0363-\u03d6\u03d8-\u03da\u03dc\u03de\u03e0\u03e2-\u03ff\u0401-\u040c\u040e-\u044f\u0451-\u045c\u045e-\u0487\u048a-\u048b\u0490-\u04eb\u04ee-\u0589\u058b-\u0652\u0656-\u06b7\u06ba-\u06be\u06c0-\u06ce\u06d0-\u06f9\u06ff\u070e\u072d-\u072f\u074b-\u077f\u07b1-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0df1\u0df5-\u0f69\u0f6b-\u0f95\u0f97-\u0fad\u0fb1-\u0fb7\u0fb9\u0fbd\u0fcd-\u0fce\u0fd0-\u0fff\u1022\u1028\u102b\u1033-\u1035\u103a-\u103f\u105a-\u11ff\u1207\u1247\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1287\u1289\u128e-\u128f\u12af\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12cf\u12d7\u12ef\u130f\u1311\u1316-\u1317\u131f\u1347\u135b-\u1360\u137d-\u139f\u13f5-\u1400\u1677-\u167f\u169d-\u169f\u16f1-\u177f\u17dd-\u17df\u17ea-\u17ff\u180f\u181a-\u181f\u1878-\u187f\u18aa-\u202e\u2030-\u2047\u204e-\u20ac\u20b0-\u20e1\u20e4-\u2138\u213b-\u2182\u2184-\u21ea\u21f4-\u2300\u2302-\u237a\u237c\u239b-\u2424\u2427-\u25ef\u25f8-\u2618\u261a-\u266f\u2672-\u27ff\u2900-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u3037\u303b-\u303d\u303f-\u319f\u31b8-\u33ff\u4db6-\u9fff\ua48d-\ua48f\ua4a2-\ua4a3\ua4b4\ua4c1\ua4c5\ua4c7-\ufb1c\ufb1e-\ufff8\ufffc-\U0010ffff",
"^3.1": "\u0000-\u03f3\u03f6-\ufdcf\ufdf0-\U000102ff\U0001031f\U00010324-\U0001032f\U0001034b-\U000103ff\U00010426-\U00010427\U0001044e-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d129\U0001d1de-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c1\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a4-\U0001d6a7\U0001d7ca-\U0001d7cd\U0001d800-\U0001ffff\U0002a6d7-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U0010ffff",
"^3.2": "\u0000-\u021f\u0221-\u034e\u0350-\u0362\u0370-\u03d7\u03da-\u03f5\u03f7-\u0489\u048c-\u04c4\u04c7-\u04c8\u04cb-\u04cc\u04cf-\u04ff\u0510-\u066d\u0670-\u07b0\u07b2-\u10f6\u10f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u2046\u2048-\u204d\u2053-\u2056\u2058-\u205e\u2064-\u2070\u2072-\u20af\u20b2-\u20e3\u20eb-\u213c\u214c-\u21f3\u2200-\u22f1\u2300-\u237b\u237d-\u239a\u23cf-\u24ea\u24ff-\u2595\u25a0-\u25f7\u2600-\u2615\u2618-\u2671\u267e-\u267f\u268a-\u2767\u2776-\u27cf\u27ec-\u27ef\u2800-\u28ff\u2b00-\u303a\u303e-\u3094\u3097-\u309e\u30a1-\u30fe\u3100-\u31ef\u3200-\u3250\u3260-\u32b0\u32c0-\ua4a1\ua4a4-\ua4b3\ua4b5-\ua4c0\ua4c2-\ua4c4\ua4c6-\ufa2f\ufa6b-\ufdfb\ufdfd-\ufdff\ufe10-\ufe44\ufe47-\ufe72\ufe74-\uff5e\uff61-\U0010ffff",
"^4.0": "\u0000-\u0220\u0222-\u0233\u0237-\u02ad\u02b0-\u02ee\u0300-\u034f\u0358-\u035c\u0360-\u03f6\u03fc-\u05ff\u0604-\u060c\u0616-\u0655\u0659-\u06ed\u06f0-\u06fe\u0700-\u072c\u0730-\u074c\u0750-\u0903\u0905-\u09bc\u09be-\u0a00\u0a02\u0a04-\u0a8b\u0a8d-\u0ae0\u0ae4-\u0af0\u0af2-\u0b34\u0b36-\u0b70\u0b72-\u0bf2\u0bfb-\u0cbb\u0cbe-\u17dc\u17de-\u17ef\u17fa-\u18ff\u191d-\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u19df\u1a00-\u1cff\u1d6c-\u2052\u2055-\u213a\u213c-\u23ce\u23d1-\u24fe\u2500-\u2613\u2616-\u2689\u2692-\u269f\u26a2-\u2aff\u2b0e-\u321c\u321f-\u324f\u3251-\u327b\u327e-\u32cb\u32d0-\u3376\u337b-\u33dd\u33e0-\u33fe\u3400-\u4dbf\u4e00-\ufdfc\ufdfe-\ufe46\ufe49-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U00010140-\U0001037f\U0001039e\U000103a0-\U00010425\U00010428-\U0001044d\U0001049e-\U0001049f\U000104aa-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010840-\U0001d2ff\U0001d357-\U0001d4c0\U0001d4c2-\U000e00ff\U000e01f0-\U0010ffff",
"^4.1": "\u0000-\u0236\u0242-\u0357\u035d-\u03fb\u0400-\u04f5\u04f8-\u05a1\u05a3-\u05c4\u05c8-\u060a\u060c-\u061d\u061f-\u0658\u065f-\u074f\u076e-\u097c\u097e-\u09cd\u09cf-\u0bb5\u0bb7-\u0be5\u0be7-\u0fcf\u0fd2-\u10f8\u10fb\u10fd-\u1206\u1208-\u1246\u1248-\u1286\u1288-\u12ae\u12b0-\u12ce\u12d0-\u12ee\u12f0-\u130e\u1310-\u131e\u1320-\u1346\u1348-\u135e\u1361-\u137f\u139a-\u197f\u19aa-\u19af\u19ca-\u19cf\u19da-\u19dd\u19e0-\u19ff\u1a1c-\u1a1d\u1a20-\u1d6b\u1dc4-\u2054\u2057\u205f-\u208f\u2095-\u20b1\u20b6-\u20ea\u20ec-\u213b\u213d-\u214b\u214d-\u23d0\u23dc-\u2617\u2619-\u267d\u2680-\u2691\u269d-\u26a1\u26b2-\u27bf\u27c7-\u2b0d\u2b14-\u2bff\u2c2f\u2c5f-\u2c7f\u2ceb-\u2cf8\u2d26-\u2d2f\u2d66-\u2d6e\u2d70-\u2d7f\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf-\u2dff\u2e18-\u2e1b\u2e1e-\u31bf\u31d0-\u327d\u327f-\u9fa5\u9fbc-\ua6ff\ua717-\ua7ff\ua82c-\ufa6f\ufada-\ufe0f\ufe1a-\U0001013f\U0001018b-\U0001039f\U000103c4-\U000103c7\U000103d6-\U000109ff\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a34-\U00010a37\U00010a3b-\U00010a3e\U00010a48-\U00010a4f\U00010a59-\U0001d1ff\U0001d246-\U0001d6a3\U0001d6a6-\U0010ffff",
"^5.0": "\u0000-\u0241\u0250-\u037a\u037e-\u04ce\u04d0-\u04f9\u0500-\u050f\u0514-\u05b9\u05bb-\u07bf\u07fb-\u097a\u097d\u0980-\u0ce1\u0ce4-\u0cf0\u0cf3-\u1aff\u1b4c-\u1b4f\u1b7d-\u1dc3\u1dcb-\u1dfd\u1e00-\u20eb\u20f0-\u214c\u214f-\u2183\u2185-\u23db\u23e8-\u26b1\u26b3-\u27c6\u27cb-\u2b13\u2b1b-\u2b1f\u2b24-\u2c5f\u2c6d-\u2c73\u2c78-\ua716\ua71b-\ua71f\ua722-\ua83f\ua878-\U000108ff\U0001091a-\U0001091e\U00010920-\U00011fff\U0001236f-\U000123ff\U00012463-\U0001246f\U00012474-\U0001d35f\U0001d372-\U0001d7c9\U0001d7cc-\U0010ffff",
"^5.1": "\u0000-\u036f\u0374-\u0375\u0378-\u03ce\u03d0-\u0486\u0488-\u0513\u0524-\u0605\u060b-\u0615\u061b-\u063a\u0640-\u076d\u0780-\u0970\u0973-\u0a50\u0a52-\u0a74\u0a76-\u0b43\u0b45-\u0b61\u0b64-\u0bcf\u0bd1-\u0c3c\u0c3e-\u0c57\u0c5a-\u0c61\u0c64-\u0c77\u0c80-\u0d3c\u0d3e-\u0d43\u0d45-\u0d61\u0d64-\u0d6f\u0d76-\u0d78\u0d80-\u0f6a\u0f6d-\u0fcd\u0fcf-\u0fd1\u0fd5-\u1021\u1023-\u1027\u1029-\u102a\u102c-\u1032\u1036-\u1039\u1040-\u1059\u109a-\u109d\u10a0-\u18a9\u18ab-\u1b7f\u1bab-\u1bad\u1bba-\u1bff\u1c38-\u1c3a\u1c4a-\u1c4c\u1c80-\u1dca\u1de7-\u1e9b\u1ea0-\u1ef9\u1f00-\u2063\u2065-\u20ef\u20f1-\u214e\u2150-\u2184\u2189-\u269c\u269e-\u26b2\u26bd-\u26bf\u26c4-\u27cb\u27cd-\u27eb\u27f0-\u2b1a\u2b20-\u2b23\u2b4d-\u2b4f\u2b55-\u2c6c\u2c70\u2c74-\u2c77\u2c7e-\u2ddf\u2e00-\u2e17\u2e1c-\u2e1d\u2e31-\u312c\u312e-\u31cf\u31e4-\u9fbb\u9fc4-\ua4ff\ua62c-\ua63f\ua660-\ua661\ua674-\ua67b\ua698-\ua71a\ua720-\ua721\ua78d-\ua7fa\ua800-\ua87f\ua8c5-\ua8cd\ua8da-\ua8ff\ua954-\ua95e\ua960-\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaa60-\ufe23\ufe27-\U0001018f\U0001019c-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U0001091f\U0001093a-\U0001093e\U00010940-\U0001d128\U0001d12a-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0010ffff",
"^5.2": "\u0000-\u0523\u0526-\u07ff\u082e-\u082f\u083f-\u08ff\u0901-\u094d\u094f-\u0954\u0956-\u0978\u097b-\u09fa\u09fc-\u0fd4\u0fd9-\u1099\u109e-\u1159\u115f-\u11a2\u11a8-\u11f9\u1200-\u13ff\u1401-\u1676\u1680-\u18af\u18f6-\u19a9\u19ac-\u19d9\u19db-\u1a1f\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1ccf\u1cf3-\u1dfc\u1dfe-\u20b5\u20b9-\u214f\u2153-\u2188\u218a-\u23e7\u23e9-\u269d\u26a0-\u26bc\u26c0-\u26c3\u26ce\u26e2\u26e4-\u26e7\u2700-\u2756\u2758-\u2b54\u2b5a-\u2c6f\u2c71-\u2c7d\u2c80-\u2cea\u2cf2-\u2e30\u2e32-\u3243\u3250-\u9fc3\u9fcc-\ua4cf\ua500-\ua69f\ua6f8-\ua82f\ua83a-\ua8df\ua8fc-\ua95f\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9e0-\uaa5f\uaa7c-\uaa7f\uaac3-\uaada\uaae0-\uabbf\uabee-\uabef\uabfa-\ud7af\ud7c7-\ud7ca\ud7fc-\ufa6a\ufa6e-\U0001083f\U00010856\U00010860-\U00010919\U0001091c-\U00010a5f\U00010a80-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b80-\U00010bff\U00010c49-\U00010e5f\U00010e7f-\U0001107f\U000110c2-\U00012fff\U0001342f-\U0001f0ff\U0001f10b-\U0001f10f\U0001f12f-\U0001f130\U0001f132-\U0001f13c\U0001f13e\U0001f140-\U0001f141\U0001f143-\U0001f145\U0001f147-\U0001f149\U0001f14f-\U0001f156\U0001f158-\U0001f15e\U0001f160-\U0001f178\U0001f17a\U0001f17d-\U0001f17e\U0001f180-\U0001f189\U0001f18e-\U0001f18f\U0001f191-\U0001f1ff\U0001f201-\U0001f20f\U0001f232-\U0001f23f\U0001f249-\U0002a6ff\U0002b735-\U0010ffff",
"^6.0": "\u0000-\u0525\u0528-\u061f\u0621-\u065e\u0660-\u083f\u085c-\u085d\u085f-\u0939\u093c-\u094e\u0950-\u0955\u0958-\u0972\u0978-\u0b71\u0b78-\u0d28\u0d2a-\u0d39\u0d3b-\u0d4d\u0d4f-\u0f8b\u0f90-\u0fd8\u0fdb-\u135c\u135f-\u1bbf\u1bf4-\u1bfb\u1c00-\u1dfb\u1dfd-\u2094\u209d-\u20b8\u20ba-\u23e8\u23f4-\u26cd\u26cf-\u26e1\u26e3\u26e8-\u2704\u2706-\u2709\u270c-\u2727\u2729-\u274b\u274d\u274f-\u2752\u2756-\u275e\u2761-\u2794\u2798-\u27af\u27b1-\u27be\u27c0-\u27cd\u27d0-\u2d6f\u2d71-\u2d7e\u2d80-\u31b7\u31bb-\ua65f\ua662-\ua78c\ua78f\ua792-\ua79f\ua7aa-\ua7f9\ua7fb-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f-\ufbb1\ufbc2-\U00010fff\U0001104e-\U00011051\U00011070-\U000167ff\U00016a39-\U0001afff\U0001b002-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0bf-\U0001f0c0\U0001f0d0\U0001f0e0-\U0001f12f\U0001f131\U0001f13d\U0001f13f\U0001f142\U0001f146\U0001f14a-\U0001f14e\U0001f157\U0001f15f\U0001f16a-\U0001f16f\U0001f179\U0001f17b-\U0001f17c\U0001f17f\U0001f18a-\U0001f18d\U0001f190\U0001f19b-\U0001f1e5\U0001f200\U0001f203-\U0001f231\U0001f23b-\U0001f24f\U0001f252-\U0001f2ff\U0001f321-\U0001f32f\U0001f336\U0001f37d-\U0001f37f\U0001f394-\U0001f39f\U0001f3c5\U0001f3cb-\U0001f3df\U0001f3f1-\U0001f3ff\U0001f43f\U0001f441\U0001f4f8\U0001f4fd-\U0001f4ff\U0001f53e-\U0001f54f\U0001f568-\U0001f5fa\U0001f600\U0001f611\U0001f615\U0001f617\U0001f619\U0001f61b\U0001f61f\U0001f626-\U0001f627\U0001f62c\U0001f62e-\U0001f62f\U0001f634\U0001f641-\U0001f644\U0001f650-\U0001f67f\U0001f6c6-\U0001f6ff\U0001f774-\U0002b73f\U0002b81e-\U0010ffff",
"^6.1": "\u0000-\u058e\u0590-\u0603\u0605-\u089f\u08a1\u08ad-\u08e3\u08ff-\u0aef\u0af1-\u0edd\u0ee0-\u10c6\u10c8-\u10cc\u10ce-\u10fc\u1100-\u1baa\u1bae-\u1bb9\u1bc0-\u1cbf\u1cc8-\u1cf2\u1cf7-\u27ca\u27cc\u27ce-\u2cf1\u2cf4-\u2d26\u2d28-\u2d2c\u2d2e-\u2d65\u2d68-\u2e31\u2e3c-\u9fcb\u9fcd-\ua673\ua67c-\ua69e\ua6a0-\ua791\ua794-\ua7a9\ua7ab-\ua7f7\ua7fa-\uaadf\uaaf7-\ufa2d\ufa30-\U0001097f\U000109b8-\U000109bd\U000109c0-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011144-\U0001117f\U000111c9-\U000111cf\U000111da-\U0001167f\U000116b8-\U000116bf\U000116ca-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001f169\U0001f16c-\U0001f53f\U0001f544-\U0001f5ff\U0001f601-\U0001f610\U0001f612-\U0001f614\U0001f616\U0001f618\U0001f61a\U0001f61c-\U0001f61e\U0001f620-\U0001f625\U0001f628-\U0001f62b\U0001f62d\U0001f630-\U0001f633\U0001f635-\U0010ffff",
"^na": "\u0000-\u0377\u037a-\u037e\u0384-\u038a\u038c\u038e-\u03a1\u03a3-\u0527\u0531-\u0556\u0559-\u055f\u0561-\u0587\u0589-\u058a\u058f\u0591-\u05c7\u05d0-\u05ea\u05f0-\u05f4\u0600-\u0604\u0606-\u061b\u061e-\u070d\u070f-\u074a\u074d-\u07b1\u07c0-\u07fa\u0800-\u082d\u0830-\u083e\u0840-\u085b\u085e\u08a0\u08a2-\u08ac\u08e4-\u08fe\u0900-\u0977\u0979-\u097f\u0981-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09fb\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0af1\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b77\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bfa\u0c01-\u0c03\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c59\u0c60-\u0c63\u0c66-\u0c6f\u0c78-\u0c7f\u0c82-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d02-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d57\u0d60-\u0d63\u0d66-\u0d75\u0d79-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0df2-\u0df4\u0e01-\u0e3a\u0e3f-\u0e5b\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00-\u0f47\u0f49-\u0f6c\u0f71-\u0f97\u0f99-\u0fbc\u0fbe-\u0fcc\u0fce-\u0fda\u1000-\u10c5\u10c7\u10cd\u10d0-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u137c\u1380-\u1399\u13a0-\u13f4\u1400-\u169c\u16a0-\u16f0\u1700-\u170c\u170e-\u1714\u1720-\u1736\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17dd\u17e0-\u17e9\u17f0-\u17f9\u1800-\u180e\u1810-\u1819\u1820-\u1877\u1880-\u18aa\u18b0-\u18f5\u1900-\u191c\u1920-\u192b\u1930-\u193b\u1940\u1944-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u19de-\u1a1b\u1a1e-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa0-\u1aad\u1b00-\u1b4b\u1b50-\u1b7c\u1b80-\u1bf3\u1bfc-\u1c37\u1c3b-\u1c49\u1c4d-\u1c7f\u1cc0-\u1cc7\u1cd0-\u1cf6\u1d00-\u1de6\u1dfc-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fc4\u1fc6-\u1fd3\u1fd6-\u1fdb\u1fdd-\u1fef\u1ff2-\u1ff4\u1ff6-\u1ffe\u2000-\u2064\u206a-\u2071\u2074-\u208e\u2090-\u209c\u20a0-\u20b9\u20d0-\u20f0\u2100-\u2189\u2190-\u23f3\u2400-\u2426\u2440-\u244a\u2460-\u26ff\u2701-\u2b4c\u2b50-\u2b59\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2cf3\u2cf9-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f-\u2d70\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2e3b\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u
2ff0-\u2ffb\u3000-\u303f\u3041-\u3096\u3099-\u30ff\u3105-\u312d\u3131-\u318e\u3190-\u31ba\u31c0-\u31e3\u31f0-\u321e\u3220-\u32fe\u3300-\u4db5\u4dc0-\u9fcc\ua000-\ua48c\ua490-\ua4c6\ua4d0-\ua62b\ua640-\ua697\ua69f-\ua6f7\ua700-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua82b\ua830-\ua839\ua840-\ua877\ua880-\ua8c4\ua8ce-\ua8d9\ua8e0-\ua8fb\ua900-\ua953\ua95f-\ua97c\ua980-\ua9cd\ua9cf-\ua9d9\ua9de-\ua9df\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa5c-\uaa7b\uaa80-\uaac2\uaadb-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\ud800-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbc1\ufbd3-\ufd3f\ufd50-\ufd8f\ufd92-\ufdc7\ufdd0-\ufdfd\ufe00-\ufe19\ufe20-\ufe26\ufe30-\ufe52\ufe54-\ufe66\ufe68-\ufe6b\ufe70-\ufe74\ufe76-\ufefc\ufeff\uff01-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\uffe0-\uffe6\uffe8-\uffee\ufff9-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010100-\U00010102\U00010107-\U00010133\U00010137-\U0001018a\U00010190-\U0001019b\U000101d0-\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031e\U00010320-\U00010323\U00010330-\U0001034a\U00010380-\U0001039d\U0001039f-\U000103c3\U000103c8-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010857-\U0001085f\U00010900-\U0001091b\U0001091f-\U00010939\U0001093f\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a38-\U00010a3a\U00010a3f-\U00010a47\U00010a50-\U00010a58\U00010a60-\U00010a7f\U00010b00-\U00010b35\U00010b39-\U00010b55\U00010b58-\U00010b72\U00010b78-\U00010b7f\U00010c00-\U00010c48\U00010e60-\U00010e7e\U00011000-\U0001104d\U00011052-\U0001106f\U00011080-\U000110c1\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U00011143\U00011180-\U000111c8\U000111d0-\U000111d9\U00011680-\U000116b7\U000116c0-\U000116c9\U00012000-\U0001236e\U00012400-\U00012462\U00012470-\U00012473\U00013000-\U0001342e\U00016800-\U00016a38\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U0001b000-\U0001b001\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d1dd\U0001d200-\U0001d245\U0001d300-\U0001d356\U0001d360-\U0001d371\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U0001eef0-\U0001eef1\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0be\U0001f0c1-\U00
01f0cf\U0001f0d1-\U0001f0df\U0001f100-\U0001f10a\U0001f110-\U0001f12e\U0001f130-\U0001f16b\U0001f170-\U0001f19a\U0001f1e6-\U0001f202\U0001f210-\U0001f23a\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f300-\U0001f320\U0001f330-\U0001f335\U0001f337-\U0001f37c\U0001f380-\U0001f393\U0001f3a0-\U0001f3c4\U0001f3c6-\U0001f3ca\U0001f3e0-\U0001f3f0\U0001f400-\U0001f43e\U0001f440\U0001f442-\U0001f4f7\U0001f4f9-\U0001f4fc\U0001f500-\U0001f53d\U0001f540-\U0001f543\U0001f550-\U0001f567\U0001f5fb-\U0001f640\U0001f645-\U0001f64f\U0001f680-\U0001f6c5\U0001f700-\U0001f773\U0001fffe-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002f800-\U0002fa1d\U0002fffe-\U0002ffff\U0003fffe-\U0003ffff\U0004fffe-\U0004ffff\U0005fffe-\U0005ffff\U0006fffe-\U0006ffff\U0007fffe-\U0007ffff\U0008fffe-\U0008ffff\U0009fffe-\U0009ffff\U000afffe-\U000affff\U000bfffe-\U000bffff\U000cfffe-\U000cffff\U000dfffe-\U000dffff\U000e0001\U000e0020-\U000e007f\U000e0100-\U000e01ef\U000efffe-\U0010ffff",
"na": "\u0378-\u0379\u037f-\u0383\u038b\u038d\u03a2\u0528-\u0530\u0557-\u0558\u0560\u0588\u058b-\u058e\u0590\u05c8-\u05cf\u05eb-\u05ef\u05f5-\u05ff\u0605\u061c-\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07ff\u082e-\u082f\u083f\u085c-\u085d\u085f-\u089f\u08a1\u08ad-\u08e3\u08ff\u0978\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09fc-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a76-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5a-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c80-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0d01\u0d04\u0d0d\u0d11\u0d3b-\u0d3c\u0d45\u0d49\u0d4f-\u0d56\u0d58-\u0d5f\u0d64-\u0d65\u0d76-\u0d78\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f5-\u13ff\u169d-\u169f\u16f1-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1878-\u187f\u18ab-\u18af\u18f6-\u18ff\u191d-\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c80-\u1cbf\u1cc8-\u1ccf\u1cf7-\u1cff\u1de7-\u1dfb\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065-\u2069\u2072-\u2073\u208f\u209d-\u209f\u20ba-\u20cf\u20f1-\u20ff\u218a-\u218f\u23f4-\u23ff\u2427-\u243f\u244b-\u245f\u2700\u2b4d-\u2b4f\u2b5a-\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e3c-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u312e-\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9fcd-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua698-\ua69e\ua6f8-\ua6ff\ua78f\ua794-\ua79f\ua7ab-\ua7f7\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c5-\ua8cd\ua8da-\ua8df\ua8fc-\ua8ff\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9e0-\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaa7c-\uaa7f\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f-\uabbf\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb
1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdcf\ufdfe-\ufdff\ufe1a-\ufe1f\ufe27-\ufe2f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018b-\U0001018f\U0001019c-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102ff\U0001031f\U00010324-\U0001032f\U0001034b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U00010860-\U000108ff\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bd\U000109c0-\U000109ff\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a34-\U00010a37\U00010a3b-\U00010a3e\U00010a48-\U00010a4f\U00010a59-\U00010a5f\U00010a80-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b80-\U00010bff\U00010c49-\U00010e5f\U00010e7f-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107f\U000110c2-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011144-\U0001117f\U000111c9-\U000111cf\U000111da-\U0001167f\U000116b8-\U000116bf\U000116ca-\U00011fff\U0001236f-\U000123ff\U00012463-\U0001246f\U00012474-\U00012fff\U0001342f-\U000167ff\U00016a39-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U0001afff\U0001b002-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1de-\U0001d1ff\U0001d246-\U0001d2ff\U0001d357-\U0001d35f\U0001d372-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001d800-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0bf-\U0001f0c0\U0001f0d0\U0001f0e0-\U0001f0ff\U0001f10b-\U0001f10f\U0001f12f\U0001f16c-\U0001f16f\U0001f19b-\U0001f1e5\U0001f203-\U0001f20f\U0001f23b-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f2ff\U0001f321-\U0001f32f\U0001f336\U0001f37d-\U0001f37f\U0001f394-\U0001f39f\U0001f3c5\U0001f3cb-\U0001f3df\U0001f3f1-\U0001f3ff\U0001f43f\U0001f441\U0001f4f8\U0001f4fd-\U0001f4ff\U0001f53e-\U0001f53f\U0001f544-\U0001f54f\U0001f568-\U0001f5fa\U0001f641-\U0001f644\U0001f650-\U0001f67f\U0001f6c6-\U0001f6ff\U0001f774-\U0001fffd\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002f7ff\U0002fa1e-\U0002fffd\U00030000-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000efffd"
}
| 1,300.441176
| 8,087
| 0.801176
| 5,921
| 44,215
| 5.98176
| 0.483702
| 0.001016
| 0.000847
| 0.001129
| 0.509007
| 0.479643
| 0.44915
| 0.405274
| 0.377266
| 0.347789
| 0
| 0.468218
| 0.004071
| 44,215
| 33
| 8,088
| 1,339.848485
| 0.336096
| 0.000656
| 0
| 0
| 1
| 0.83871
| 0.991059
| 0.988615
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.032258
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a77ddd79e29e81a0a27e327023eb27f7ff2af1e
| 16,065
|
py
|
Python
|
pyradiomicsFeatureExtraction.py
|
ntnu-mr-cancer/SegmentationQualityControl
|
ee8531eb867d3046b287790303d0bbcbc8b61504
|
[
"MIT"
] | 2
|
2021-07-22T09:14:00.000Z
|
2022-01-07T10:07:17.000Z
|
pyradiomicsFeatureExtraction.py
|
ntnu-mr-cancer/SegmentationQualityControl
|
ee8531eb867d3046b287790303d0bbcbc8b61504
|
[
"MIT"
] | null | null | null |
pyradiomicsFeatureExtraction.py
|
ntnu-mr-cancer/SegmentationQualityControl
|
ee8531eb867d3046b287790303d0bbcbc8b61504
|
[
"MIT"
] | 3
|
2021-03-10T13:45:25.000Z
|
2022-01-28T13:47:31.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 12 15:55:07 2018
@author: mattjise
Modified by Mohammed Sunoqrot March 2020
"""
import radiomics
import SimpleITK as sitk
import numpy as np
import math
import json
import os
import sys
def getSettings(image_array_in, mask_array_in, nr_bins):
# bin width chosen so the masked intensity range is split into nr_bins bins
intensity_range = np.max(image_array_in[mask_array_in == 1]) - np.min(image_array_in[mask_array_in == 1])
settings = {}
settings['binWidth'] = intensity_range/nr_bins
settings['correctMask'] = True
return settings
def getSliceFromMask(mask_in, slice_nr_in):
# keep only the requested slice indices of the mask, zeroing out all other slices
mask_array = sitk.GetArrayFromImage(mask_in)
new_mask_array = np.zeros_like(mask_array)
new_mask_array[slice_nr_in,:,:] = mask_array[slice_nr_in,:,:]
new_mask = sitk.GetImageFromArray(new_mask_array)
new_mask.CopyInformation(mask_in)
return new_mask
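# NOTE (illustrative refactor, not part of the original script): every block below
# repeats the same extract-and-save pattern; under the same assumptions it could be
# collapsed into a single hypothetical helper, e.g.:
#
#   def extract_and_save(image_in, mask_img_in, mask_array_in, region_class, feature_class):
#       settings = getSettings(sitk.GetArrayFromImage(image_in), mask_array_in, 64)
#       extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
#       extractor.disableAllFeatures()
#       extractor.enableFeatureClassByName(feature_class)
#       featureVector = extractor.execute(image_in, mask_img_in)
#       featureVector = {k: (float(v) if isinstance(v, np.ndarray) else v)
#                        for k, v in featureVector.items()}
#       results_name = patient_nr + '_' + region_class + '_' + feature_class + '.json'
#       with open(os.path.join(results_dir, results_name), 'w') as out:
#           out.write(json.dumps(featureVector))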
# Get paths
with open(os.path.join(os.path.dirname(sys.argv[0]),'paths.txt')) as f:
flines = f.readlines()
image_dir = flines[0].strip()
mask_dir = flines[1].strip()
results_dir = flines[2].strip()
patient_nr = flines[3].strip()
# read in data
image = sitk.ReadImage(image_dir)
mask = sitk.ReadImage(mask_dir)
mask = sitk.Cast(mask, sitk.sitkUInt8)
mask.SetDirection(image.GetDirection())
mask.SetOrigin(image.GetOrigin())
mask_array = sitk.GetArrayFromImage(mask)
image_array = sitk.GetArrayFromImage(image)
# get slices
slice_contains_mask = mask_array.sum(axis=(1, 2)) > 0
index = np.where(slice_contains_mask == 1)
#---Whole prostate---#
# whole prostate - firstorder
region_class = 'wholeprostate'
feature_class = 'firstorder'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - shape 3d
region_class = 'wholeprostate'
feature_class = 'shape'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - glcm
region_class = 'wholeprostate'
feature_class = 'glcm'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - glrlm
region_class = 'wholeprostate'
feature_class = 'glrlm'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - glszm
region_class = 'wholeprostate'
feature_class = 'glszm'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - ngtdm
region_class = 'wholeprostate'
feature_class = 'ngtdm'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# whole prostate - gldm
region_class = 'wholeprostate'
feature_class = 'gldm'
settings = getSettings(image_array,mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
#---Apex---#
# prepare input
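# (the apex is approximated as the first third of the slices that contain mask voxels)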
slice_nr = index[0][0:math.floor(np.size(index)/3)]
new_mask = getSliceFromMask(mask,slice_nr)
new_mask_array = sitk.GetArrayFromImage(new_mask)
# apex- firstorder
region_class = 'apex'
feature_class = 'firstorder'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - shape 3d
region_class = 'apex'
feature_class = 'shape'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - glcm
region_class = 'apex'
feature_class = 'glcm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - glrlm
region_class = 'apex'
feature_class = 'glrlm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - glszm
region_class = 'apex'
feature_class = 'glszm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - ngtdm
region_class = 'apex'
feature_class = 'ngtdm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# apex - gldm
region_class = 'apex'
feature_class = 'gldm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
#---Base---#
# prepare input
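# (the base is approximated as the last third of the slices that contain mask voxels)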
slice_nr = index[0][np.size(index)-math.floor(np.size(index)/3):]
new_mask = getSliceFromMask(mask,slice_nr)
new_mask_array = sitk.GetArrayFromImage(new_mask)
# base - firstorder
region_class = 'base'
feature_class = 'firstorder'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - shape 3d
region_class = 'base'
feature_class = 'shape'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - glcm
region_class = 'base'
feature_class = 'glcm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - glrlm
region_class = 'base'
feature_class = 'glrlm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - glszm
region_class = 'base'
feature_class = 'glszm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - ngtdm
region_class = 'base'
feature_class = 'ngtdm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
# base - gldm
region_class = 'base'
feature_class = 'gldm'
settings = getSettings(image_array,new_mask_array,64)
extractor = radiomics.featureextractor.RadiomicsFeatureExtractor(**settings)
extractor.disableAllFeatures()
extractor.enableFeatureClassByName(feature_class)
featureVector = extractor.execute(image,new_mask)
for key in featureVector.keys():
if type(featureVector[key]) == type(np.array(1)):
featureVector[key] = float(featureVector[key])
results_name = patient_nr+'_'+region_class+'_'+feature_class+'.json'
with open(os.path.join(results_dir,results_name), 'w') as f:
f.write(json.dumps(featureVector))
| 41.404639
| 108
| 0.734765
| 1,892
| 16,065
| 6.048626
| 0.067653
| 0.066061
| 0.040371
| 0.026914
| 0.913579
| 0.869102
| 0.862723
| 0.847868
| 0.847868
| 0.847868
| 0
| 0.007447
| 0.138998
| 16,065
| 387
| 109
| 41.511628
| 0.81991
| 0.03629
| 0
| 0.879365
| 0
| 0
| 0.030355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006349
| false
| 0
| 0.022222
| 0
| 0.034921
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a9a325185f4b20eb87b0d5980ea4981fc72cc8d
| 11,100
|
py
|
Python
|
tests/operators/test_agnostic_stats_check.py
|
guohui-gao/astro
|
4e63302bc5c69401b10568598c4ff738e21563f5
|
[
"Apache-2.0"
] | null | null | null |
tests/operators/test_agnostic_stats_check.py
|
guohui-gao/astro
|
4e63302bc5c69401b10568598c4ff738e21563f5
|
[
"Apache-2.0"
] | null | null | null |
tests/operators/test_agnostic_stats_check.py
|
guohui-gao/astro
|
4e63302bc5c69401b10568598c4ff738e21563f5
|
[
"Apache-2.0"
] | null | null | null |
"""
Unittest module to test Operators.
Requires the unittest, pytest, and requests-mock Python libraries.
"""
import logging
import os
import pathlib
import unittest.mock
from airflow.models import DAG
from airflow.utils import timezone
# Import Operator
import astro.sql as aql
from astro.sql.table import Table
from tests.operators import utils as test_utils
log = logging.getLogger(__name__)
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
class TestStatsCheckOperator(unittest.TestCase):
"""
Test Stats Check Operator.
"""
cwd = pathlib.Path(__file__).parent
@classmethod
def setUpClass(cls):
super().setUpClass()
aql.load_file(
path=str(cls.cwd) + "/../data/homes.csv",
output_table=Table(
"stats_check_test_1",
conn_id="postgres_conn",
database="pagila",
schema="public",
),
).operator.execute({"run_id": "foo"})
aql.load_file(
path=str(cls.cwd) + "/../data/homes2.csv",
output_table=Table(
"stats_check_test_2",
conn_id="postgres_conn",
database="pagila",
schema="public",
),
).operator.execute({"run_id": "foo"})
aql.load_file(
path=str(cls.cwd) + "/../data/homes3.csv",
output_table=Table(
"stats_check_test_3",
conn_id="postgres_conn",
database="pagila",
schema="public",
),
).operator.execute({"run_id": "foo"})
cls.Stats_check_table_4 = test_utils.get_table_name("stats_check_test_4")
aql.load_file(
path=str(cls.cwd) + "/../data/homes.csv",
output_table=Table(
table_name=cls.Stats_check_table_4,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
).operator.execute({"run_id": "foo"})
cls.Stats_check_table_5 = test_utils.get_table_name("stats_check_test_5")
aql.load_file(
path=str(cls.cwd) + "/../data/homes2.csv",
output_table=Table(
table_name=cls.Stats_check_table_5,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
).operator.execute({"run_id": "foo"})
cls.Stats_check_table_6 = test_utils.get_table_name("stats_check_test_6")
aql.load_file(
path=str(cls.cwd) + "/../data/homes3.csv",
output_table=Table(
table_name=cls.Stats_check_table_6,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
).operator.execute({"run_id": "foo"})
@classmethod
def tearDownClass(cls) -> None:
test_utils.drop_table_snowflake(
table_name=cls.Stats_check_table_4, # type: ignore
database=os.getenv("SNOWFLAKE_DATABASE"), # type: ignore
schema=os.getenv("SNOWFLAKE_SCHEMA"), # type: ignore
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"), # type: ignore
conn_id="snowflake_conn",
)
test_utils.drop_table_snowflake(
table_name=cls.Stats_check_table_5, # type: ignore
database=os.getenv("SNOWFLAKE_DATABASE"), # type: ignore
schema=os.getenv("SNOWFLAKE_SCHEMA"), # type: ignore
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"), # type: ignore
conn_id="snowflake_conn",
)
test_utils.drop_table_snowflake(
table_name=cls.Stats_check_table_6, # type: ignore
database=os.getenv("SNOWFLAKE_DATABASE"), # type: ignore
schema=os.getenv("SNOWFLAKE_SCHEMA"), # type: ignore
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"), # type: ignore
conn_id="snowflake_conn",
)
def clear_run(self):
self.run = False
def setUp(self):
super().setUp()
self.clear_run()
self.addCleanup(self.clear_run)
self.dag = DAG(
"test_dag",
default_args={
"owner": "airflow",
"start_date": DEFAULT_DATE,
},
)
def test_stats_check_postgres_outlier_exists(self):
try:
a = aql.stats_check(
main_table=Table(
"stats_check_test_1",
database="pagila",
conn_id="postgres_conn",
schema="public",
),
compare_table=Table(
"stats_check_test_2",
database="pagila",
conn_id="postgres_conn",
schema="public",
),
checks=[aql.OutlierCheck("room_check", {"rooms": "rooms"}, 2, 0.0)],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert False
except ValueError as e:
assert True
def test_stats_check_postgres_outlier_not_exists(self):
try:
a = aql.stats_check(
main_table=Table(
"stats_check_test_1",
database="pagila",
conn_id="postgres_conn",
schema="public",
),
compare_table=Table(
"stats_check_test_3",
database="pagila",
conn_id="postgres_conn",
schema="public",
),
checks=[aql.OutlierCheck("room_check", {"rooms": "rooms"}, 2, 0.0)],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert True
except ValueError as e:
assert False
def test_stats_check_snowflake_outlier_exists(self):
try:
a = aql.stats_check(
main_table=Table(
table_name=self.Stats_check_table_4,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
compare_table=Table(
table_name=self.Stats_check_table_5,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
checks=[aql.OutlierCheck("room_check", {"rooms": "rooms"}, 2, 0.0)],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert False
except ValueError as e:
assert True
def test_stats_check_snowflake_outlier_not_exists(self):
try:
a = aql.stats_check(
main_table=Table(
table_name=self.Stats_check_table_4,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
compare_table=Table(
table_name=self.Stats_check_table_6,
conn_id="snowflake_conn",
schema=os.getenv("SNOWFLAKE_SCHEMA"),
database=os.getenv("SNOWFLAKE_DATABASE"),
warehouse=os.getenv("SNOWFLAKE_WAREHOUSE"),
),
checks=[
aql.OutlierCheck(
"room_check", {"rooms": "rooms", "taxes": "taxes"}, 2, 0.0
)
],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert True
except ValueError as e:
assert False
class TestBIGQueryIntegrationWithStatsCheckOperator(unittest.TestCase):
"""
Test Stats Check Operator against BigQuery.
"""
cwd = pathlib.Path(__file__).parent
@classmethod
def setUpClass(cls):
super().setUpClass()
aql.load_file(
path=str(cls.cwd) + "/../data/homes.csv",
output_table=Table(
"stats_check_test_1", conn_id="bigquery", schema="tmp_astro"
),
).operator.execute({"run_id": "foo"})
aql.load_file(
path=str(cls.cwd) + "/../data/homes2.csv",
output_table=Table(
"stats_check_test_2", conn_id="bigquery", schema="tmp_astro"
),
).operator.execute({"run_id": "foo"})
aql.load_file(
path=str(cls.cwd) + "/../data/homes3.csv",
output_table=Table(
"stats_check_test_3", conn_id="bigquery", schema="tmp_astro"
),
).operator.execute({"run_id": "foo"})
@classmethod
def tearDownClass(cls) -> None:
pass
def clear_run(self):
self.run = False
def setUp(self):
super().setUp()
self.clear_run()
self.addCleanup(self.clear_run)
self.dag = DAG(
"test_dag",
default_args={
"owner": "airflow",
"start_date": DEFAULT_DATE,
},
)
def test_stats_check_bigQuery_outlier_exists(self):
try:
a = aql.stats_check(
main_table=Table(
"stats_check_test_1", conn_id="bigquery", schema="tmp_astro"
),
compare_table=Table(
"stats_check_test_2", conn_id="bigquery", schema="tmp_astro"
),
checks=[aql.OutlierCheck("room_check", {"rooms": "rooms"}, 2, 0.0)],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert False
except ValueError as e:
assert True
def test_stats_check_bigQuery_outlier_not_exists(self):
try:
a = aql.stats_check(
main_table=Table(
"stats_check_test_1", conn_id="bigquery", schema="tmp_astro"
),
compare_table=Table(
"stats_check_test_3", conn_id="bigquery", schema="tmp_astro"
),
checks=[aql.OutlierCheck("room_check", {"rooms": "rooms"}, 2, 0.0)],
max_rows_returned=10,
)
a.execute({"run_id": "foo"})
assert True
except ValueError as e:
assert False
| 34.796238
| 84
| 0.521532
| 1,118
| 11,100
| 4.886404
| 0.105546
| 0.080542
| 0.093355
| 0.041186
| 0.91598
| 0.915431
| 0.90756
| 0.906645
| 0.887424
| 0.88413
| 0
| 0.0102
| 0.364054
| 11,100
| 318
| 85
| 34.90566
| 0.763706
| 0.02964
| 0
| 0.845878
| 0
| 0
| 0.164459
| 0
| 0
| 0
| 0
| 0
| 0.043011
| 1
| 0.050179
| false
| 0.003584
| 0.032258
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a59d0d1fbfd0c8d4a0b30af852aa7dfae390d0c
| 58,839
|
py
|
Python
|
Library/learningstudio/content/service.py
|
lileeayee/LearningStudio-Libraries-Python
|
d073cd8a4b221458789592900651566b797b6125
|
[
"Apache-2.0"
] | null | null | null |
Library/learningstudio/content/service.py
|
lileeayee/LearningStudio-Libraries-Python
|
d073cd8a4b221458789592900651566b797b6125
|
[
"Apache-2.0"
] | null | null | null |
Library/learningstudio/content/service.py
|
lileeayee/LearningStudio-Libraries-Python
|
d073cd8a4b221458789592900651566b797b6125
|
[
"Apache-2.0"
] | null | null | null |
"""
LearningStudio RESTful API Libraries
These libraries make it easier to use the LearningStudio Course APIs.
Full Documentation is provided with the library.
Need Help or Have Questions?
Please use the PDN Developer Community at https://community.pdn.pearson.com
:category LearningStudio Course APIs
:author Wes Williams <wes.williams@pearson.com>
:author Pearson Developer Services Team <apisupport@pearson.com>
:copyright 2014 Pearson Education Inc.
:license http://www.apache.org/licenses/LICENSE-2.0 Apache 2.0
:version 1.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
service
-------
Defines a class that implements :class:`learningstudio.core.service.BasicService` for the Content API.
"""
import json
from learningstudio.core.service import BasicService, HttpStatusCode
from learningstudio.oauth.util import json_loads
PATH_COURSES_ITEMS = '/courses/%s/items'
PATH_COURSES_ITEMS_ = '/courses/%s/items/%s'
PATH_COURSES_ITEMHIERARCHY = '/courses/%s/itemHierarchy'
PATH_COURSES_TEXTMULTIMEDIAS = '/courses/%s/textMultimedias'
PATH_COURSES_TEXTMULTIMEDIAS__CONTENTPATH_ = '/courses/%s/textMultimedias/%s/%s'
PATH_COURSES_TEXTMULTIMEDIAS__CONTENTPATH__USESOURCEDOMAIN = '/courses/%s/textMultimedias/%s/%s?useSourceDomain=true'
PATH_COURSES_TEXTMULTIMEDIAS_ = '/courses/%s/textMultimedias/%s'
PATH_COURSES_MSOFFICEDOCUMENTS = '/courses/%s/msOfficeDocuments'
PATH_COURSES_MSOFFICEDOCUMENTS_ = '/courses/%s/msOfficeDocuments/%s'
PATH_COURSES_MSOFFICEDOCUMENTS_ORIGINALDOCUMENT = '/courses/%s/msOfficeDocuments/%s/originalDocument'
PATH_COURSES_MSOFFICEDOCUMENTS_CONTENT_ = '/courses/%s/msOfficeDocuments/%s/content/%s'
PATH_COURSES_WEBCONTENTUPLOADS = '/courses/%s/webContentUploads'
PATH_COURSES_WEBCONTENTUPLOADS_ = '/courses/%s/webContentUploads/%s'
PATH_COURSES_WEBCONTENTUPLOADS_ORIGINALDOCUMENT = '/courses/%s/webContentUploads/%s/originalDocument'
PATH_COURSES_WEBCONTENTUPLOADS_CONTENT_ = '/courses/%s/webContentUploads/%s/content/%s'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEHIEARCHY = '/courses/%s/threadedDiscussions/%s/topics/%s/responseHierarchy'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEHIEARCHY = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responseHierarchy'
PATH_USERS_COURSES_ITEMS = '/users/%s/courses/%s/items'
PATH_USERS_COURSES_ITEMHIERARCHY = '/users/%s/courses/%s/itemHierarchy'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES_USERVIEWRESPONSES = '/users/%s/courses/%s/threadedDiscussions/%s/topics/%s/userviewresponses/%s/userviewresponses'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES_USERVIEWRESPONSES__DEPTH = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES_USERVIEWRESPONSES + '?depth=%s'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES = '/users/%s/courses/%s/threadedDiscussions/%s/topics/%s/userviewresponses'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES__DEPTH = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES + '?depth=%s'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSECOUNTS = '/users/%s/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responseCounts'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSECOUNTS__DEPTH = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSECOUNTS + '?depth=%s'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSECOUNTS = '/users/%s/courses/%s/threadedDiscussions/%s/topics/%s/responseCounts'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSECOUNTS__DEPTH = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSECOUNTS + '?depth=%s'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEBRANCH = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responseBranch'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEAUTHOR = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responseAuthor'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEANDAUTHORCOMPS = '/courses/%s/threadeddiscussions/%s/topics/%s/responses/%s/responseAndAuthorComps'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEANDAUTHORCOMPS__DEPTH = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responseAndAuthorComps?depth=%s'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEANDAUTHORCOMPS = '/courses/%s/threadedDiscussions/%s/topics/%s/responseAndAuthorComps'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEANDAUTHORCOMPS__DEPTH = '/courses/%s/threadedDiscussions/%s/topics/%s/responseAndAuthorComps?depth=%s'
PATH_USERS_COURSES_THREADEDDISCUSSIONS__LASTRESPONSE = '/users/%s/courses/%s/threadedDiscussions/lastResponse'
PATH_COURSES_THREADEDDISCUSSIONS = '/courses/%s/threadedDiscussions'
PATH_COURSES_THREADEDDISCUSSIONS__USESOURCEDOMAIN = '/courses/%s/threadedDiscussions?UseSourceDomain=true'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS = '/courses/%s/threadedDiscussions/%s/topics'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS__USESOURCEDOMAIN = '/courses/%s/threadedDiscussions/%s/topics?UseSourceDomain=true'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_ = '/courses/%s/threadedDiscussions/%s/topics/%s'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_USESOURCEDOMAIN = '/courses/%s/threadedDiscussions/%s/topics/%s?UseSourceDomain=true'
PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSE_READSTATUS = '/users/%s/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/readStatus'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSES = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s/responses'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_ = '/courses/%s/threadedDiscussions/%s/topics/%s/responses/%s'
PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES = '/courses/%s/threadedDiscussions/%s/topics/%s/responses'
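# Each PATH_* constant above is an old-style '%s' template; relative URLs are built by
# interpolating the IDs in order, e.g. (hypothetical IDs for illustration):
#
#   PATH_COURSES_ITEMS_ % ('12345', '67890')   # -> '/courses/12345/items/67890'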
class ContentService(BasicService):
"""An implementation of BasicService for handling the Contents API.
"""
def __init__(self, oauth_service_factory):
super(ContentService, self).__init__(oauth_service_factory)
@staticmethod
def __getServiceIdentifier():
return "LS-Library-Content-Python-V1"
def getItems(self, courseId):
"""Get items for a course with
``GET /courses/{courseId}/items``
using OAuth1 or OAuth2 as a teacher, teaching assistant or administrator.
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_ITEMS % (courseId,)
return self.doGet(relativeUrl)
def getItem(self, courseId, itemId):
"""Get a specific item for a course with
``GET /courses/{courseId}/items/{itemId}``
using OAuth1 or OAuth2.
:param courseId: ID of the course.
:type courseId: str
:param itemId: ID of the item.
:type itemId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_ITEMS_ % (courseId, itemId,)
return self.doGet(relativeUrl)
def getItemContent(self, courseId, itemId):
"""Get content for a specific item in a course with
``getItem(courseId, itemId)``
by following the links to the item itself
and next to the contentUrl
:param courseId: ID of the course.
:type courseId: str
:param itemId: ID of the content item.
:type itemId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
# get the item details
response = self.getItem(courseId, itemId)
if response.isError():
return response
itemsJson = response.getContent()
json_data = json_loads(itemsJson)
# link to the item is in the link where title is null
items = json_data.get('items')
links = items[0]['links']
for link in links:
if link.get('title') is None:
relativeUrl = self.__getRelativePath(link['href'])
# get the item
response = self.doGet(relativeUrl)
if response.isError():
return response
itemJson = response.getContent()
json_data = json_loads(itemJson)
# get the item content location
contentType, contentPath = json_data.popitem()
relativeUrl = self.__getRelativePath(contentPath[0]['contentUrl'])
# get and return the item content
return self.doGet(relativeUrl)
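# Illustrative usage sketch (assumes an oauth_service_factory configured elsewhere;
# not part of the original library):
#
#   service = ContentService(oauth_service_factory)
#   response = service.getItemContent('<courseId>', '<itemId>')
#   if not response.isError():
#       print(response.getContent())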
def getItemHierarchy(self, courseId):
"""Get item hierarchy for a course with
``GET /courses/{courseId}/itemHierarchy``
using OAuth1 or OAuth2 as a teacher, teaching assistant, or administrator
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_ITEMHIERARCHY % (courseId,)
return self.doGet(relativeUrl)
def getUserItemHierarchy(self, userId, courseId):
"""Get user item hierarchy for a course with
``GET /users/{userId}/courses/{courseId}/itemHierarchy``
using OAuth1 or OAuth2 as a teacher, teaching assistant, or administrator
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_ITEMHIERARCHY % (userId, courseId,)
return self.doGet(relativeUrl)
def getUserItems(self, userId, courseId):
"""Get user items in a course with
``GET /users/{userId}/courses/{courseId}/items``
using OAuth1 or OAuth2 as a teacher, teaching assistant or administrator.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_ITEMS % (userId, courseId,)
return self.doGet(relativeUrl)
def getItemLinkDetails(self, courseId, itemId):
"""Get links details from a specific item for a course with
``GET /courses/{courseId}/items/{itemId}``
using OAuth2 as a student, teacher or teaching assistant.
Example JSON structure: (Multimedia item)::
{ "details":
{ "access": {...},
"schedule": {...},
"self": {...},
"selfType": "textMultimedias"
}
}
:param courseId: ID of the course.
:type courseId: str
:param itemId: ID of the item.
:type itemId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
response = self.getItem(courseId, itemId)
if response.isError(): return response
courseItemsJson = response.getContent()
json_data = json_loads(courseItemsJson)
items = json_data.get('items')
detail = {}
if items != None and len(items) > 0:
for item in items:
links = item.get('links')
for link in links:
relativeUrl = self.__getRelativePath(link.get('href'))
response = self.doGet(relativeUrl)
if (response.isError()): return response
linkElement = json_loads(response.getContent())
title = link.get('title')
if title == None:
if linkElement != None and len(linkElement) > 0:
for key in linkElement.keys():
value = linkElement[key]
detail['self'] = value
detail['selfType'] = key
break
else:
linkElement = linkElement.get(title)
detail[title] = linkElement
detailWrapper = {'details': detail}
response.setContent(json.dumps(detailWrapper))
else:
raise RuntimeError('Unexpected condition in library: No items')
return response
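# Illustrative usage sketch (hypothetical IDs; not part of the original library):
#
#   response = service.getItemLinkDetails('<courseId>', '<itemId>')
#   details = json_loads(response.getContent())['details']
#   print(details.get('selfType'))   # e.g. 'textMultimedias'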
def getTextMultimedias(self, courseId):
"""Get text multimedias by course with
``GET /courses/{courseId}/textMultimedias``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_TEXTMULTIMEDIAS % (courseId,)
return self.doGet(relativeUrl)
def getTextMultimedia(self, courseId, textMediaId):
"""Get specific text multimedia content by course with
``GET /courses/{courseId}/textMultimedias``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param textMediaId: ID of the text media.
:type textMediaId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_TEXTMULTIMEDIAS_ % (courseId, textMediaId,)
return self.doGet(relativeUrl)
def getTextMultimediasContent(self, courseId, textMediaId, contentPath = None):
"""Get specific text multimedia content by course with UseSourceDomain parameter with
``GET /courses/{courseId}/textMultimedias`` and
``GET /courses/{courseId}/textMultimedias?UseSourceDomain=true``
using OAuth2 as a student, teacher or teaching assistant
:param courseId: ID of the course.
:type courseId: str
:param textMediaId: ID of the text media.
:type textMediaId: str
:param contentPath: Path of content.
:type contentPath: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content
"""
if contentPath != None:
return self.__getTextMultimediasContent(courseId, textMediaId, contentPath)
response = self.getTextMultimedia(courseId, textMediaId)
if response.isError(): return response
json_data = json_loads(response.getContent())
json_data = json_data.get('textMultimedias')[0]
contentUrl = json_data.get('contentUrl')
relativeUrl = self.__getRelativePath(contentUrl)
return self.doGet(relativeUrl)
#if use_source_domain != None:
# return self.__getTextMultimediasContent(courseId, textMediaId, contentPath, use_source_domain)
#else:
# relativeUrl = PATH_COURSES_TEXTMULTIMEDIAS__CONTENTPATH_ % (courseId, textMediaId, contentPath,)
# return self.doGet(relativeUrl)
def __getTextMultimediasContent(self, courseId, textMediaId, contentPath):
"""Get specific text multimedia content by course with UseSourceDomain parameter with
``GET /courses/{courseId}/textMultimedias`` and
``GET /courses/{courseId}/textMultimedias?UseSourceDomain=true``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course
:type courseId: str
:param textMediaId: ID of the text media.
:type textMediaId: str
:param contentPath: Path of content.
:type contentPath: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_TEXTMULTIMEDIAS__CONTENTPATH_ % (courseId, textMediaId, contentPath,)
return self.doGet(relativeUrl)
def getMsOfficeDocuments(self, courseId):
"""Get all MS Office documents in a course with
``GET /courses/{courseId}/msOfficeDocuments``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_MSOFFICEDOCUMENTS % (courseId,)
return self.doGet(relativeUrl)
def getMsOfficeDocument(self, courseId, msOfficeDocumentId):
"""Get a specific MS Office document in a course with
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param msOfficeDocumentId: ID of the ms office document.
:type msOfficeDocumentId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_MSOFFICEDOCUMENTS_ % (courseId, msOfficeDocumentId,)
return self.doGet(relativeUrl)
def getMsOfficeDocumentContent(self, courseId, msOfficeDocumentId, content_path = None):
"""Get content for a specific MS Office Document in a course with
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}`` and
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}/content/{contentPath}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param msOfficeDocumentId: ID of the ms office document.
:type msOfficeDocumentId: str
:key content_path: Path of the content.
:type content_path: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if content_path != None:
return self.__getMsOfficeDocumentContent(courseId, msOfficeDocumentId, content_path)
response = self.getMsOfficeDocument(courseId, msOfficeDocumentId)
if response.isError(): return response
json_data = json_loads(response.getContent())
json_data = json_data.get('msOfficeDocuments')[0]
contentUrl = json_data.get('contentUrl')
relativeUrl = self.__getRelativePath(contentUrl)
return self.doGet(relativeUrl)
def __getMsOfficeDocumentContent(self, courseId, msOfficeDocumentId, contentPath):
"""Get content for a specific MS Office Document in a course with
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}`` and
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}/content/{contentPath}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param msOfficeDocumentId: ID of the ms office document.
:type msOfficeDocumentId: str
:param contentPath: Path of the content.
:type contentPath: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_MSOFFICEDOCUMENTS_CONTENT_ % (courseId, msOfficeDocumentId, contentPath,)
return self.doGet(relativeUrl)
def getMsOfficeDocumentOriginal(self, courseId, msOfficeDocumentId):
"""Get the original of a specific MS Office document in a course with
``GET /courses/{courseId}/msOfficeDocuments/{msOfficeDocumentId}/originalDocument``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param msOfficeDocumentId: ID of the ms office document.
:type msOfficeDocumentId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_MSOFFICEDOCUMENTS_ORIGINALDOCUMENT % (courseId, msOfficeDocumentId,)
return self.doGet(relativeUrl)
def getWebContentUploads(self, courseId):
"""Get all web content uploads in a course with
``GET /courses/{courseId}/webContentUploads``
using OAuth2 as a student, teacher or teaching assistant
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_WEBCONTENTUPLOADS % (courseId,)
return self.doGet(relativeUrl)
def getWebContentUpload(self, courseId, webContentUploadId):
"""Get a specific MS Office document in a course with
``GET /courses/{courseId}/webContentUploads/{webContentUploadId}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param webContentUploadId: ID of the web content upload.
:type webContentUploadId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_WEBCONTENTUPLOADS_ % (courseId, webContentUploadId,)
return self.doGet(relativeUrl)
def getWebContentUploadOriginal(self, courseId, webContentUploadId):
"""Get a specific MS Office document in a course with
``GET /courses/{courseId}/webContentUploads/{webContentUploadId}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param webContentUploadId: ID of the web content upload.
:type webContentUploadId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_WEBCONTENTUPLOADS_ORIGINALDOCUMENT % (courseId, webContentUploadId,)
return self.doGet(relativeUrl)
def getWebContentUploadContent(self, courseId, webContentUploadId, content_path = None):
"""Get content for a specific Web Content Upload in a course with
``GET /courses/{courseId}/webContentUpload/{webContentUploadId}`` and
``GET /courses/{courseId}/webContentUpload/{webContentUploadId}/content/{contentPath}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param webContentUploadId: ID of the web content upload.
:type webContentUploadId: str
:key content_path: Path of the content.
:type content_path: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if content_path != None: return self.__getWebContentUploadContent(courseId, webContentUploadId, content_path)
response = self.getWebContentUpload(courseId, webContentUploadId)
if response.isError(): return response
json_data = json_loads(response.getContent())
json_data = json_data.get('webContentUploads')[0]
contentUrl = json_data.get('contentUrl')
relativeUrl = self.__getRelativePath(contentUrl)
return self.doGet(relativeUrl)
def __getWebContentUploadContent(self, courseId, webContentUploadId, contentPath):
"""Get content for a specific Web Content Upload in a course with
``GET /courses/{courseId}/webContentUpload/{webContentUploadId}`` and
``GET /courses/{courseId}/webContentUpload/{webContentUploadId}/content/{contentPath}``
using OAuth2 as a student, teacher or teaching assistant.
:param courseId: ID of the course.
:type courseId: str
:param webContentUploadId: ID of the web content upload.
:type webContentUploadId: str
:param contentPath: Path of the content.
:type contentPath: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_WEBCONTENTUPLOADS_CONTENT_ % (courseId, webContentUploadId, contentPath,)
return self.doGet(relativeUrl)
def getThreadedDiscussionResponseHierarchy(self, courseId, threadId, topicId, responseId):
"""Get hierarchy of a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseHierarchy``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEHIEARCHY % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionUserViewResponses(self, userId, courseId, threadId,
topicId, responseId, depth = None):
"""Get all user's view statuses of a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/userviewresponses/{responseId}/userviewresponses``
using OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth != None: return self.__getThreadedDiscussionUserViewResponses(userId, courseId, threadId,
topicId, responseId, depth)
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES_USERVIEWRESPONSES % (userId, courseId,
threadId, topicId,
responseId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionUserViewResponses(self, userId, courseId, threadId, topicId,
responseId, depth):
"""Get all user's view statuses of a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/userviewresponses/{responseId}/userviewresponses?depth={depth}``
using OAuth2 as a student.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:param depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES_USERVIEWRESPONSES__DEPTH % (userId, courseId,
threadId, topicId,
responseId, depth,)
return self.doGet(relativeUrl)
def getThreadedDiscussionTopicUserViewResponses(self, userId, courseId, threadId, topicId, depth = None):
"""Get all user's view statuses of a discussion thread topic with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/userviewresponses``
using OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth != None: return self.__getThreadedDiscussionTopicUserViewResponses(userId, courseId, threadId, topicId, depth)
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES % (userId, courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionTopicUserViewResponses(self, userId, courseId, threadId, topicId, depth):
"""Get all user's view statuses of a discussion thread topic with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/userviewresponses?depth={depth}``
using OAuth2 as a student.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param depth: Number of levels to traverse
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_USERVIEWRESPONSES__DEPTH % (userId, courseId, threadId, topicId, depth,)
return self.doGet(relativeUrl)
def getThreadedDiscussionTopicHierarchy(self, courseId, threadId, topicId):
"""Get hierarchy of a discussion thread topic with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responseHierarchy``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEHIEARCHY % (courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionResponseCount(self, userId, courseId, threadId, topicId, responseId, depth = None):
"""Get count of responses for a specific response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseCounts``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth != None: return self.__getThreadedDiscussionResponseCount(userId, courseId, threadId,
topicId, responseId, depth)
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSECOUNTS % (userId, courseId,
threadId, topicId,
responseId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionResponseCount(self, userId, courseId, threadId, topicId, responseId, depth):
"""Get count of responses for a specific response with
``GET /users/{userId}/courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseCounts?depth={depth}``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:param depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSECOUNTS__DEPTH % (userId, courseId,
threadId, topicId,
responseId, depth)
return self.doGet(relativeUrl)
def getThreadedDiscussionTopicResponseCount(self, userId, courseId, threadId, topicId, depth = None):
"""Get count of responses for a specific topic with
``GET /users/{userId}/courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responseCounts``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth is not None: return self.__getThreadedDiscussionTopicResponseCount(userId, courseId, threadId, topicId, depth)
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSECOUNTS % (userId, courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionTopicResponseCount(self, userId, courseId, threadId, topicId, depth):
"""Get count of responses for a specific topic with
``GET /users/{userId}/courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responseCounts?depth={depth}``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSECOUNTS__DEPTH % (userId, courseId, threadId, topicId, depth,)
return self.doGet(relativeUrl)
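# The pair of methods above shows the pattern used throughout this class for
# optional query parameters: the public method delegates to a private variant
# that formats the "?depth={depth}" template only when a depth was supplied.
# A minimal, self-contained sketch of that dispatch (constant and function
# names here are illustrative, not the SDK's own):
_SKETCH_PATH = "/users/%s/courses/%s/threadeddiscussions/%s/topics/%s/responseCounts"
_SKETCH_PATH_DEPTH = _SKETCH_PATH + "?depth=%s"

def _sketch_response_count_url(user_id, course_id, thread_id, topic_id, depth=None):
    """Build the relative URL, appending the depth query only when given."""
    if depth is not None:
        return _SKETCH_PATH_DEPTH % (user_id, course_id, thread_id, topic_id, depth)
    return _SKETCH_PATH % (user_id, course_id, thread_id, topic_id)

# _sketch_response_count_url("u1", "c1", "t1", "p1", depth=2)
# -> "/users/u1/courses/c1/threadeddiscussions/t1/topics/p1/responseCounts?depth=2"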
def getThreadedDiscussionResponseBranch(self, courseId, threadId, topicId, responseId):
"""Get branch hierarchy to a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseBranch``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEBRANCH % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionResponseAuthor(self, courseId, threadId, topicId, responseId):
"""Get author of a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseAuthor``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEAUTHOR % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionResponseAndAuthorComposite(self, courseId, threadId, topicId, responseId, depth = None):
"""Get response and author composite of a discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseAndAuthorComps``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth is not None: return self.__getThreadedDiscussionResponseAndAuthorComposite(courseId, threadId, topicId, responseId, depth)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEANDAUTHORCOMPS % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionResponseAndAuthorComposite(self, courseId, threadId, topicId, responseId, depth):
"""Get response and author composite for a discussion thread response at a specified depth with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responseAndAuthorComps?depth={depth}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:param depth: Max depth to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSEANDAUTHORCOMPS__DEPTH % (courseId, threadId,
topicId, responseId, depth,)
return self.doGet(relativeUrl)
def getThreadedDiscussionTopicResponseAndAuthorComposite(self, courseId, threadId, topicId, depth = None):
"""Get response and author composite for a discussion thread topic with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responseAndAuthorComps``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:key depth: Number of levels to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if depth is not None: return self.__getThreadedDiscussionTopicResponseAndAuthorComposite(courseId, threadId, topicId, depth)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEANDAUTHORCOMPS % (courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionTopicResponseAndAuthorComposite(self, courseId, threadId, topicId, depth):
"""Get response and author composite of a discussion thread topic at a specified depth with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responseAndAuthorComps?depth={depth}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param depth: Max depth to traverse.
:type depth: int
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSEANDAUTHORCOMPS__DEPTH % (courseId, threadId, topicId, depth,)
return self.doGet(relativeUrl)
def getLastThreadedDiscussionResponse(self, userId, courseId):
"""Get a user's last threaded discussion response in a course with
``GET /users/{userId}/courses/{courseId}/threadeddiscussions/lastResponse``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS__LASTRESPONSE % (userId, courseId,)
return self.doGet(relativeUrl)
def getThreadedDiscussions(self, courseId, use_source_domain = None):
"""Get threaded dicussions for a course with
``GET /courses/{courseId}/threadeddiscussions``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:key use_source_domain: Indicator of whether to use the source domain in links.
:type use_source_domain: bool
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if use_source_domain is True: return self.__getThreadedDiscussions(courseId)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS % (courseId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussions(self, courseId):
"""Get threaded dicussions for a course with
``GET /courses/{courseId}/threadeddiscussions?UseSourceDomain={useSourceDomain}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS__USESOURCEDOMAIN % (courseId,)
return self.doGet(relativeUrl)
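# The UseSourceDomain flag follows the same dispatch idea: a second path
# constant carries the query string instead of it being built dynamically.
# A hedged, self-contained sketch (names are illustrative; whether the real
# constant hard-codes "true" or interpolates the value is an assumption):
_SKETCH_TD_PATH = "/courses/%s/threadeddiscussions"
_SKETCH_TD_PATH_USESOURCEDOMAIN = _SKETCH_TD_PATH + "?UseSourceDomain=true"

def _sketch_threaded_discussions_url(course_id, use_source_domain=False):
    """Return the relative URL, adding UseSourceDomain=true when requested."""
    if use_source_domain:
        return _SKETCH_TD_PATH_USESOURCEDOMAIN % (course_id,)
    return _SKETCH_TD_PATH % (course_id,)

# _sketch_threaded_discussions_url("c1", use_source_domain=True)
# -> "/courses/c1/threadeddiscussions?UseSourceDomain=true"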
def getThreadedDiscussionTopics(self, courseId, threadId, use_source_domain = None):
"""Get threaded dicussion topics for a course with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:key use_source_domain: Indicator of whether to use the source domain in links.
:type use_source_domain: bool
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if use_source_domain is True: return self.__getThreadedDiscussionTopics(courseId, threadId)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS % (courseId, threadId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionTopics(self, courseId, threadId):
"""Get threaded dicussion topics for a course with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics?UseSourceDomain={useSourceDomain}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS__USESOURCEDOMAIN % (courseId, threadId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionTopic(self, courseId, threadId, topicId, use_source_domain = None):
"""Get threaded dicussion topics for a course with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:key use_source_domain: Indicator of whether to use the source domain in links.
:type use_source_domain: bool
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
if use_source_domain is True: return self.__getThreadedDiscussionTopic(courseId, threadId, topicId)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_ % (courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def __getThreadedDiscussionTopic(self, courseId, threadId, topicId):
"""Get threaded dicussion topics for a course with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}?UseSourceDomain={useSourceDomain}``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_USESOURCEDOMAIN % (courseId, threadId, topicId,)
return self.doGet(relativeUrl)
def getThreadedDiscussionResponseReadStatus(self, userId, courseId, threadId, topicId, responseId):
"""Get read status of a user's discussion thread response with
``GET /users/{userId}/courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/readStatus``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSE_READSTATUS % (userId, courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def updateThreadedDiscussionResponseReadStatus(self, userId, courseId, threadId,
topicId, responseId, readStatus):
"""Get read status of a user's discussion thread response with
``PUT /users/{userId}/courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/readStatus``
using OAuth1 or OAuth2 as a student.
:param userId: ID of the user.
:type userId: str
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:param readStatus: Read status Message.
:type readStatus: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_USERS_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSE_READSTATUS % (userId, courseId, threadId, topicId, responseId,)
return self.doPut(relativeUrl, body = readStatus)
def getThreadedDiscussionResponses(self, courseId, threadId, topicId, responseId):
"""Get responses to a specific discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responses``
using OAuth1 or OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSES % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def createThreadedDiscussionResponse(self, courseId, threadId, topicId, response_id = None, response_message = ""):
"""Create a response to a specific discussion thread response with
``POST /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}/responses``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:key response_id: ID of the response.
:type response_id: str
:key response_message: Response message to create.
:type response_message: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
responseId = response_id
responseMessage = response_message
if responseId is None: return self.__createThreadedDiscussionResponse(courseId, threadId, topicId, responseMessage)
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_RESPONSES % (courseId, threadId, topicId, responseId,)
return self.doPost(relativeUrl, body = responseMessage)
def __createThreadedDiscussionResponse(self, courseId, threadId, topicId, responseMessage):
"""Create a response to a specific discussion thread topic with
``POST /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseMessage: Response message to create.
:type responseMessage: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES % (courseId, threadId, topicId,)
return self.doPost(relativeUrl, body = responseMessage)
def updateThreadedDiscussionResponse(self, courseId, threadId, topicId, responseId, responseMessage):
"""Update a response to a specific discussion thread response with
``PUT /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:param responseMessage: Updated response message.
:type responseMessage: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_ % (courseId, threadId, topicId, responseId,)
return self.doPut(relativeUrl, body = responseMessage)
def getThreadedDiscussionResponse(self, courseId, threadId, topicId, responseId):
"""Get a specific discussion thread response with
``GET /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}``
using OAuth2 as a student, teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_ % (courseId, threadId, topicId, responseId,)
return self.doGet(relativeUrl)
def deleteThreadedDiscussionResponse(self, courseId, threadId, topicId, responseId):
"""Delete a specific discussion thread response with
``DELETE /courses/{courseId}/threadeddiscussions/{threadId}/topics/{topicId}/responses/{responseId}``
using OAuth1 or OAuth2 as a teacher, teaching assistant or admin.
:param courseId: ID of the course.
:type courseId: str
:param threadId: ID of the thread.
:type threadId: str
:param topicId: ID of the topic.
:type topicId: str
:param responseId: ID of the response.
:type responseId: str
:returns: A :class:`learningstudio.core.Response` object with details of status and content.
"""
relativeUrl = PATH_COURSES_THREADEDDISCUSSIONS_TOPICS_RESPONSES_ % (courseId, threadId, topicId, responseId,)
return self.doDelete(relativeUrl)
def __getRelativePath(self, url):
relativeUrl = None
index = url.find(self.API_DOMAIN)
if index > -1:
index += len(self.API_DOMAIN)
relativeUrl = url[index:]
else :
index = url.find(".com")
if index > -1:
index += 4
relativeUrl = url[index:]
return relativeUrl
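# A small, runnable illustration of the relative-path extraction above: the URL
# is cut either right after the configured API domain or, failing that, right
# after the first ".com". The domain value below is an assumption used only for
# the demonstration.
def _sketch_relative_path(url, api_domain="https://example.api.learningstudio.com"):
    index = url.find(api_domain)
    if index > -1:
        return url[index + len(api_domain):]
    index = url.find(".com")
    if index > -1:
        return url[index + 4:]
    return None

# _sketch_relative_path("https://example.api.learningstudio.com/users/1/courses/2")
# -> "/users/1/courses/2"
# _sketch_relative_path("https://other.host.com/users/1") -> "/users/1"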
| 48.149755
| 187
| 0.651966
| 5,966
| 58,839
| 6.332048
| 0.053637
| 0.019324
| 0.026127
| 0.0234
| 0.848612
| 0.834185
| 0.787596
| 0.750668
| 0.716706
| 0.672155
| 0
| 0.002371
| 0.268716
| 58,839
| 1,221
| 188
| 48.189189
| 0.875593
| 0.007019
| 0
| 0.309677
| 0
| 0.012903
| 0.096273
| 0.085388
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.009677
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a6aab4f6b272b830dbe0ea131a0598bea111da9
| 15,216
|
py
|
Python
|
tests/test_scorecard.py
|
nehaljwani/optbinning
|
ee98684f3d25e35cf5efa2ef724844397e851294
|
[
"Apache-2.0"
] | 1
|
2021-02-09T02:49:32.000Z
|
2021-02-09T02:49:32.000Z
|
tests/test_scorecard.py
|
nehaljwani/optbinning
|
ee98684f3d25e35cf5efa2ef724844397e851294
|
[
"Apache-2.0"
] | null | null | null |
tests/test_scorecard.py
|
nehaljwani/optbinning
|
ee98684f3d25e35cf5efa2ef724844397e851294
|
[
"Apache-2.0"
] | null | null | null |
"""
Scorecard testing.
"""
# Guillermo Navas-Palencia <g.navas.palencia@gmail.com>
# Copyright (C) 2020
import pandas as pd
import numpy as np
from pytest import approx, raises
from contextlib import redirect_stdout
from optbinning import BinningProcess
from optbinning import Scorecard
from sklearn.datasets import load_boston
from sklearn.datasets import load_breast_cancer
from sklearn.exceptions import NotFittedError
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
def test_params():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
with raises(TypeError):
scorecard = Scorecard(target=1, binning_process=binning_process,
estimator=estimator)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=estimator,
estimator=estimator)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=binning_process)
scorecard.fit(df)
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="new_method",
scaling_method_params=dict())
scorecard.fit(df)
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=None)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=[])
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, intercept_based=1)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, reverse_scorecard=1)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, rounding=1)
scorecard.fit(df)
with raises(TypeError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, verbose=1)
scorecard.fit(df)
def test_scaling_method_params_continuous_pdo_odds():
data = load_boston()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
with raises(ValueError):
estimator = LinearRegression()
binning_process = BinningProcess(variable_names)
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="pdo_odds",
scaling_method_params={})
scorecard.fit(df)
def test_scaling_params():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="pdo_odds",
scaling_method_params={"pdo": 20})
scorecard.fit(df)
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="pdo_odds",
scaling_method_params={"pdo": 20, "odds": -2,
"scorecard_points": -22})
scorecard.fit(df)
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params={"min": "a", "max": 600})
scorecard.fit(df)
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params={"min": 900, "max": 600})
scorecard.fit(df)
def test_input():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
target = data.target
target[0] = 4
df["target"] = target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
with raises(ValueError):
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator)
scorecard.fit(df)
def test_default():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator).fit(df)
with raises(ValueError):
sct = scorecard.table(style="new")
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(-43.65762593147646, rel=1e-6)
assert sc_max == approx(42.69694657427327, rel=1e-6)
def test_default_continuous():
data = load_boston()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LinearRegression()
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator).fit(df)
sct = scorecard.table(style="detailed")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(-15.813545796848476, rel=1e-6)
assert sc_max == approx(85.08156623609487, rel=1e-6)
def test_scaling_method_pdo_odd():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
odds = 1 / data.target.mean()
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"pdo": 20, "odds": odds, "scorecard_points": 600}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="pdo_odds",
scaling_method_params=scaling_method_params).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(-612.2266586867094, rel=1e-6)
assert sc_max == approx(1879.4396115559216, rel=1e-6)
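# The pdo/odds parameters used above follow the conventional "points to double
# the odds" scorecard scaling: factor = pdo / ln(2) and
# offset = scorecard_points - factor * ln(odds), so the score moves by `pdo`
# points whenever the odds double. This is a sketch of the arithmetic only;
# that optbinning uses exactly this parametrization internally is an assumption.
import math

def pdo_odds_scaling(pdo, odds, scorecard_points):
    """Return (factor, offset) for the classical score = offset + factor * ln(odds)."""
    factor = pdo / math.log(2)
    offset = scorecard_points - factor * math.log(odds)
    return factor, offset

# pdo_odds_scaling(20, 50, 600) -> (~28.85, ~487.1)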
def test_scaling_method_min_max():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"min": 300, "max": 850}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=scaling_method_params).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(300, rel=1e-6)
assert sc_max == approx(850, rel=1e-6)
def test_intercept_based():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"min": 300, "max": 850}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=scaling_method_params,
intercept_based=True).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(300 - scorecard.intercept_, rel=1e-6)
assert sc_max == approx(850 - scorecard.intercept_, rel=1e-6)
def test_reverse_scorecard():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"min": 300, "max": 850}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=scaling_method_params,
reverse_scorecard=True).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(300, rel=1e-6)
assert sc_max == approx(850, rel=1e-6)
def test_rounding():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"min": 200.52, "max": 850.66}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=scaling_method_params,
rounding=True).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(201, rel=1e-6)
assert sc_max == approx(851, rel=1e-6)
def test_rounding_pdo_odds():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
odds = 1 / data.target.mean()
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"pdo": 20, "odds": odds, "scorecard_points": 600}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="pdo_odds",
scaling_method_params=scaling_method_params,
rounding=True).fit(df)
sct = scorecard.table(style="summary")
sc_min, sc_max = sct.groupby("Variable").agg(
{'Points': [np.min, np.max]}).sum()
assert sc_min == approx(-612, rel=1e-6)
assert sc_max == approx(1880, rel=1e-6)
def test_estimator_not_coef():
from sklearn.ensemble import RandomForestClassifier
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = RandomForestClassifier()
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator)
with raises(RuntimeError):
scorecard.fit(df)
def test_predict_score():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scaling_method_params = {"min": 300.12, "max": 850.66}
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, scaling_method="min_max",
scaling_method_params=scaling_method_params)
with raises(NotFittedError):
pred = scorecard.predict(df)
with raises(NotFittedError):
pred_proba = scorecard.predict_proba(df)
with raises(NotFittedError):
score = scorecard.score(df)
scorecard.fit(df)
pred = scorecard.predict(df)
pred_proba = scorecard.predict_proba(df)
score = scorecard.score(df)
assert pred[:5] == approx([0, 0, 0, 0, 0])
assert pred_proba[:5, 1] == approx(
[1.15260206e-06, 9.79035720e-06, 7.52481206e-08, 1.12438599e-03,
9.83145644e-06], rel=1e-6)
assert score[:5] == approx([652.16590046, 638.52659074, 669.56413105,
608.27744027, 638.49988325], rel=1e-6)
def test_information():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator)
with raises(NotFittedError):
scorecard.information()
scorecard.fit(df)
with raises(ValueError):
scorecard.information(print_level=-1)
with open("tests/test_scorecard_information.txt", "w") as f:
with redirect_stdout(f):
scorecard.information(print_level=0)
scorecard.information(print_level=1)
scorecard.information(print_level=2)
def test_verbose():
data = load_breast_cancer()
variable_names = data.feature_names
df = pd.DataFrame(data.data, columns=variable_names)
df["target"] = data.target
binning_process = BinningProcess(variable_names)
estimator = LogisticRegression()
scorecard = Scorecard(target="target", binning_process=binning_process,
estimator=estimator, verbose=True)
with open("tests/test_scorecard_verbose.txt", "w") as f:
with redirect_stdout(f):
scorecard.fit(df)
| 34.660592
| 79
| 0.650565
| 1,677
| 15,216
| 5.692904
| 0.09362
| 0.105583
| 0.083796
| 0.076254
| 0.850215
| 0.805279
| 0.783492
| 0.761496
| 0.758772
| 0.747041
| 0
| 0.031621
| 0.241391
| 15,216
| 438
| 80
| 34.739726
| 0.79546
| 0.006046
| 0
| 0.737342
| 0
| 0
| 0.048164
| 0.004499
| 0
| 0
| 0
| 0
| 0.060127
| 1
| 0.050633
| false
| 0
| 0.037975
| 0
| 0.088608
| 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6abda239daff253a89a8c12d277464d34caed46f
| 19,584
|
py
|
Python
|
tests/test_triggear_heart.py
|
futuresimple/triggear
|
d6b8511ba8550225e7c34bd52232327b2b89d972
|
[
"MIT"
] | 14
|
2017-08-17T16:48:26.000Z
|
2019-07-10T12:11:49.000Z
|
tests/test_triggear_heart.py
|
futuresimple/triggear
|
d6b8511ba8550225e7c34bd52232327b2b89d972
|
[
"MIT"
] | null | null | null |
tests/test_triggear_heart.py
|
futuresimple/triggear
|
d6b8511ba8550225e7c34bd52232327b2b89d972
|
[
"MIT"
] | null | null | null |
import asyncio
import logging
import pytest
from mockito import mock, expect, when, captor
from app.clients.async_client import AsyncClientNotFoundException, AsyncClientException
from app.clients.github_client import GithubClient
from app.clients.jenkins_client import JenkinsClient
from app.clients.jenkinses_clients import JenkinsesClients
from app.clients.mongo_client import MongoClient
from app.hook_details.hook_details import HookDetails
from app.hook_details.hook_params_parser import HookParamsParser
from app.mongo.registration_cursor import RegistrationCursor
from app.triggear_heart import TriggearHeart
from tests.async_mockito import async_iter, async_value
pytestmark = pytest.mark.asyncio
@pytest.mark.usefixtures('unstub')
class TestTriggearHeart:
async def test__when_job_does_not_exist__it_should_have_missed_times_field_incremented(self):
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path'},
spec=RegistrationCursor,
strict=True
)
when(mongo_client).get_registered_jobs(hook_details).thenReturn(async_iter(registration_cursor))
when(hook_details).should_trigger(registration_cursor, github_client).thenReturn(async_value(True))
when(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
when(jenkins_client).get_job_info('job_path').thenRaise(AsyncClientNotFoundException('Job not found'))
expect(hook_details).get_query()
expect(mongo_client, strict=True, times=1).increment_missed_counter(hook_details, registration_cursor).thenReturn(async_value(None))
# when
await TriggearHeart(mongo_client, github_client, jenkinses_clients).trigger_registered_jobs(hook_details)
async def test__when_job_does_exist__it_should_be_triggered(self):
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path'},
spec=RegistrationCursor,
strict=True
)
when(mongo_client).get_registered_jobs(hook_details).thenReturn(async_iter(registration_cursor))
when(hook_details).should_trigger(registration_cursor, github_client).thenReturn(async_value(True))
when(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
when(jenkins_client).get_job_info('job_path').thenReturn(async_value({}))
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(triggear_heart).trigger_registered_job(hook_details, registration_cursor).thenReturn(async_value(None))
# when
await triggear_heart.trigger_registered_jobs(hook_details)
async def test__when_job_should_not_be_triggered__warning_is_displayed(self):
mock(logging, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path'},
spec=RegistrationCursor,
strict=True
)
when(mongo_client).get_registered_jobs(hook_details).thenReturn(async_iter(registration_cursor))
when(hook_details).should_trigger(registration_cursor, github_client).thenReturn(async_value(False))
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(triggear_heart, times=0).trigger_registered_job(hook_details, registration_cursor).thenReturn(async_value(None))
arg_captor = captor()
expect(logging).warning(arg_captor)
# when
await triggear_heart.trigger_registered_jobs(hook_details)
assert isinstance(arg_captor.value, str)
assert 'will not be run due to unmet registration restrictions in' in arg_captor.value
async def test__when_job_url_is_none__not_found_status_is_not_reported(self):
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path'},
spec=RegistrationCursor,
strict=True
)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(github_client, times=0).create_github_build_status(any, any, any, any, any, any)
await triggear_heart.report_not_found_build_to_github(hook_details, registration_cursor, None, 3)
async def test__when_job_url_is_none__unaccepted_params_status_is_not_reported(self):
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path'},
spec=RegistrationCursor,
strict=True
)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(github_client, times=0).create_github_build_status(any, any, any, any, any, any)
await triggear_heart.report_unaccepted_parameters_to_github(hook_details, registration_cursor, None, 3, {})
async def test__when_job_url_is_not_none__not_found_status_is_reported(self):
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).get_ref().thenReturn('ref')
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state="error",
url='url',
description="Triggear cant find build url:job_path #3",
context='job_path').thenReturn(async_value(None))
await triggear_heart.report_not_found_build_to_github(hook_details, registration_cursor, 'url', 3)
async def test__when_job_url_is_not_none__unaccepted_params_status_is_reported(self):
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).get_ref().thenReturn('ref')
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state="error",
url='url',
description="Job url:job_path did not accept requested parameters None",
context='job_path').thenReturn(async_value(None))
await triggear_heart.report_unaccepted_parameters_to_github(hook_details, registration_cursor, 'url', 3, None)
async def test__trigger_registered_job__success_flow(self):
mock(HookParamsParser)
mock(asyncio)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).setup_final_param_values(registration_cursor)
expect(HookParamsParser).get_requested_parameters_values(hook_details, registration_cursor).thenReturn({})
expect(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
expect(jenkins_client).get_jobs_next_build_number('job_path').thenReturn(async_value(3))
expect(jenkins_client).get_job_url('job_path').thenReturn(async_value('job_url'))
expect(jenkins_client).build_jenkins_job('job_path', {}).thenReturn(async_value(None))
expect(jenkins_client).get_build_info_data('job_path', 3)\
.thenReturn(async_value({'url': 'job_url'}))\
.thenReturn(async_value({'url': 'job_url', 'result': 'SUCCESS'}))
expect(hook_details).get_ref().thenReturn('ref')
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state='pending',
url='job_url',
description='build in progress',
context='job_path').thenReturn(async_value(None))
expect(jenkins_client).is_job_building('job_path', 3).thenReturn(async_value(True)).thenReturn(async_value(False))
expect(asyncio).sleep(1).thenReturn(async_value(None))
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state='success',
url='job_url',
description='build succeeded',
context='job_path').thenReturn(async_value(None))
await triggear_heart.trigger_registered_job(hook_details, registration_cursor)
async def test__trigger_registered_job__when_build_job_raises__status_is_reported(self):
mock(HookParamsParser)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).setup_final_param_values(registration_cursor)
expect(HookParamsParser).get_requested_parameters_values(hook_details, registration_cursor).thenReturn({})
expect(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
expect(jenkins_client).get_jobs_next_build_number('job_path').thenReturn(async_value(3))
expect(jenkins_client).get_job_url('job_path').thenReturn(async_value('job_url'))
expect(jenkins_client).build_jenkins_job('job_path', {}).thenRaise(AsyncClientException('terrible', 500))
expect(triggear_heart).report_unaccepted_parameters_to_github(hook_details, registration_cursor, 'job_url', 3, {})\
.thenReturn(async_value(None))
await triggear_heart.trigger_registered_job(hook_details, registration_cursor)
async def test__trigger_registered_job__when_build_info_is_none__status_is_reported(self):
mock(HookParamsParser)
mock(asyncio)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).setup_final_param_values(registration_cursor)
expect(HookParamsParser).get_requested_parameters_values(hook_details, registration_cursor).thenReturn({})
expect(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
expect(jenkins_client).get_jobs_next_build_number('job_path').thenReturn(async_value(3))
expect(jenkins_client).get_job_url('job_path').thenReturn(async_value('job_url'))
expect(jenkins_client).build_jenkins_job('job_path', {}).thenReturn(async_value(None))
expect(jenkins_client).get_build_info_data('job_path', 3)\
.thenReturn(async_value(None))
expect(hook_details).get_ref().thenReturn('ref')
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state='error',
url='job_url',
description='Triggear cant find build url:job_path #3',
context='job_path').thenReturn(async_value(None))
await triggear_heart.trigger_registered_job(hook_details, registration_cursor)
async def test__trigger_registered_job__second_build_info_is_none(self):
mock(HookParamsParser)
mock(asyncio)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).setup_final_param_values(registration_cursor)
expect(HookParamsParser).get_requested_parameters_values(hook_details, registration_cursor).thenReturn({})
expect(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
expect(jenkins_client).get_jobs_next_build_number('job_path').thenReturn(async_value(3))
expect(jenkins_client).get_job_url('job_path').thenReturn(async_value('job_url'))
expect(jenkins_client).build_jenkins_job('job_path', {}).thenReturn(async_value(None))
expect(jenkins_client).get_build_info_data('job_path', 3)\
.thenReturn(async_value({'url': 'job_url'}))\
.thenReturn(async_value(None))
expect(hook_details).get_ref().thenReturn('ref')
expect(github_client).create_github_build_status(repo='repo',
sha='ref',
state='pending',
url='job_url',
description='build in progress',
context='job_path').thenReturn(async_value(None))
expect(jenkins_client).is_job_building('job_path', 3).thenReturn(async_value(True)).thenReturn(async_value(False))
expect(asyncio).sleep(1).thenReturn(async_value(None))
await triggear_heart.trigger_registered_job(hook_details, registration_cursor)
async def test__trigger_registered_job__next_build_number_is_not_available(self):
mock(HookParamsParser)
mock(asyncio)
hook_details: HookDetails = mock(spec=HookDetails, strict=True)
registration_cursor: RegistrationCursor = mock(
{'jenkins_url': 'url', 'job_name': 'job_path', 'repo': 'repo'},
spec=RegistrationCursor,
strict=True
)
jenkins_client: JenkinsClient = mock(spec=JenkinsClient, strict=True)
mongo_client: MongoClient = mock(spec=MongoClient, strict=True)
jenkinses_clients: JenkinsesClients = mock(spec=JenkinsesClients, strict=True)
github_client: GithubClient = mock(spec=GithubClient, strict=True)
triggear_heart = TriggearHeart(mongo_client, github_client, jenkinses_clients)
expect(hook_details).setup_final_param_values(registration_cursor)
expect(HookParamsParser).get_requested_parameters_values(hook_details, registration_cursor).thenReturn({})
expect(jenkinses_clients).get_jenkins('url').thenReturn(jenkins_client)
expect(jenkins_client).get_jobs_next_build_number('job_path').thenRaise(KeyError())
expect(jenkins_client).build_jenkins_job('job_path', {}).thenReturn(async_value(None))
await triggear_heart.trigger_registered_job(hook_details, registration_cursor)
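# The async_value/async_iter helpers imported from tests.async_mockito are what
# let plain mockito stubs stand in for coroutines and async iterators in the
# tests above. Their actual implementation is not shown here; a plausible
# minimal sketch (an assumption, not the project's real code) looks like this:
async def _sketch_async_value(value):
    # awaiting the returned coroutine yields `value` once
    return value

async def _sketch_async_iter(*items):
    # `async for` over the returned async generator yields each item in turn
    for item in items:
        yield item

# e.g. when(client).get_job_info('job').thenReturn(_sketch_async_value({})) hands
# the awaiting production code a coroutine that resolves to {}.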
| 58.285714
| 140
| 0.672028
| 2,053
| 19,584
| 6.070141
| 0.067706
| 0.055368
| 0.059381
| 0.033542
| 0.899374
| 0.8883
| 0.87835
| 0.874178
| 0.871048
| 0.865672
| 0
| 0.001669
| 0.234988
| 19,584
| 335
| 141
| 58.459701
| 0.830129
| 0.000715
| 0
| 0.758007
| 0
| 0
| 0.057958
| 0
| 0
| 0
| 0
| 0
| 0.007117
| 1
| 0
| false
| 0
| 0.049822
| 0
| 0.053381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ad7ca0fb95153a08764d46dbe1d38ea936fc1c8
| 550
|
py
|
Python
|
eval_medseg_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_medseg_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_medseg_timm-regnetx_002_ElasticTransform.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_0_ElasticTransform.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_1_ElasticTransform.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_2_ElasticTransform.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_3_ElasticTransform.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_4_ElasticTransform.yml",
]
for l in ls:
os.system(l)
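# The five commands above could equally be generated instead of hard-coded; a
# small equivalent sketch (assuming the config files follow exactly this naming
# scheme for folds 0-4):
alt_ls = ["python main.py --configs configs/"
          f"eval_medseg_unetplusplus_timm-regnetx_002_{i}_ElasticTransform.yml"
          for i in range(5)]
assert alt_ls == ls  # sanity check against the hand-written list above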
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0
| 0.038536
| 0.056364
| 550
| 11
| 105
| 50
| 0.803468
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6aec6d15efca6df5de45497bf618ed17c05ebef4
| 3,461
|
py
|
Python
|
WebMirror/management/rss_parser_funcs/feed_parse_extractNutty.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 193
|
2016-08-02T22:04:35.000Z
|
2022-03-09T20:45:41.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractNutty.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 533
|
2016-08-23T20:48:23.000Z
|
2022-03-28T15:55:13.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractNutty.py
|
rrosajp/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 19
|
2015-08-13T18:01:08.000Z
|
2021-07-12T17:13:09.000Z
|
def extractNutty(item):
"""
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
if 'A Mistaken Marriage Match' in item['tags'] and 'a generation of military counselor' in item['tags']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: A generation of military counselor', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and 'a-generation-of-military-counselor-' in item['linkUrl']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: A generation of military counselor', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and 'Record of Washed Grievances Chapter' in item['title']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: Record of washed grievances', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and 'record-of-washed-grievances' in item['linkUrl']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: Record of washed grievances', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and 'the-general-only-fears-the-maidens-escape' in item['linkUrl']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: The General Only Fears the Maiden\'s Escape', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and '/the-general-only-fear-the-maidens-escape-chapter' in item['linkUrl']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: The General Only Fears the Maiden\'s Escape', vol, chp, frag=frag, postfix=postfix)
if 'A Mistaken Marriage Match' in item['tags'] and '/destined-marriage-with-fragrance-chapter-' in item['linkUrl']:
return buildReleaseMessageWithType(item, 'A mistaken marriage match: Destined Marriage With Fragrance', vol, chp, frag=frag, postfix=postfix)
if 'Destined Marriages With Fragrance Chapter' in item['title']:
return buildReleaseMessageWithType(item, 'Destined Marriage with Fragrance', vol, chp, frag=frag, postfix=postfix)
if item['tags'] == ['A Mistaken Marriage Match']:
titlemap = [
('DMSJ Chapter ', 'A mistaken marriage match: Destined Marriage Of Shang Jun', 'translated'),
('Destined Marriage Shang Jun: Chapter ', 'A mistaken marriage match: Destined Marriage Of Shang Jun', 'translated'),
('Destined Marriage Of Shang Jun: Chapter ', 'A mistaken marriage match: Destined Marriage Of Shang Jun', 'translated'),
('DMSJ: Ch ', 'A mistaken marriage match: Destined Marriage Of Shang Jun', 'translated'),
('DMSJ: Chapter ', 'A mistaken marriage match: Destined Marriage Of Shang Jun', 'translated'),
('Destined Marriage With Fragrance ', 'A mistaken marriage match: Destined Marriage With Fragrance', 'translated'),
('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
('Master of Dungeon', 'Master of Dungeon', 'oel'),
]
for titlecomponent, name, tl_type in titlemap:
if titlecomponent.lower() in item['title'].lower():
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
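# The titlemap loop above is a case-insensitive substring lookup over
# (title_component, series_name, tl_type) triples. A self-contained sketch of
# that pattern (helper name and sample data are illustrative, not part of this
# parser):
def _match_title(title, titlemap):
    """Return (series_name, tl_type) for the first matching title component."""
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in title.lower():
            return name, tl_type
    return None

# _match_title("DMSJ Chapter 12",
#              [("DMSJ Chapter ", "Destined Marriage Of Shang Jun", "translated")])
# -> ("Destined Marriage Of Shang Jun", "translated")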
| 73.638298
| 154
| 0.701242
| 431
| 3,461
| 5.62413
| 0.150812
| 0.07797
| 0.147277
| 0.190594
| 0.818069
| 0.785066
| 0.773515
| 0.743399
| 0.72236
| 0.72236
| 0
| 0
| 0.185207
| 3,461
| 47
| 155
| 73.638298
| 0.859574
| 0
| 0
| 0.171429
| 0
| 0
| 0.492036
| 0.056183
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0
| 0
| 0.342857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 0a793afff0abf0102307b1aea2bc5f4c1b28fdfa
| 10,907
| py
| Python
| tests/integration/proxy/v1/service/session/test_participant.py
| fefi95/twilio-python
| b9bfea293b6133fe84d4d8d3ac4e2a75381c3881
| ["MIT"]
| 1
| 2019-12-30T21:46:55.000Z
| 2019-12-30T21:46:55.000Z
| tests/integration/proxy/v1/service/session/test_participant.py
| fefi95/twilio-python
| b9bfea293b6133fe84d4d8d3ac4e2a75381c3881
| ["MIT"]
| null
| null
| null
| tests/integration/proxy/v1/service/session/test_participant.py
| fefi95/twilio-python
| b9bfea293b6133fe84d4d8d3ac4e2a75381c3881
| ["MIT"]
| null
| null
| null
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ParticipantTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(sid="KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://proxy.twilio.com/v1/Services/KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Sessions/KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants/KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"sid": "KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"session_sid": "KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identifier": "+14155551212",
"proxy_identifier": "+14155559999",
"proxy_identifier_sid": "PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"date_deleted": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"date_created": "2015-07-30T20:00:00Z",
"url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"message_interactions": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/MessageInteractions"
}
}
'''
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(sid="KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_fetch_channel_response(self):
self.holodeck.mock(Response(
200,
'''
{
"sid": "KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"session_sid": "KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identifier": "messenger:14155551212",
"proxy_identifier": "messenger:14155559999",
"proxy_identifier_sid": "XEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "a facebook user",
"date_deleted": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"date_created": "2015-07-30T20:00:00Z",
"url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"message_interactions": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/MessageInteractions"
}
}
'''
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(sid="KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.list()
self.holodeck.assert_has_request(Request(
'get',
'https://proxy.twilio.com/v1/Services/KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Sessions/KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"previous_page_url": null,
"next_page_url": null,
"url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants?PageSize=50&Page=0",
"page": 0,
"first_page_url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants?PageSize=50&Page=0",
"page_size": 50,
"key": "participants"
},
"participants": []
}
'''
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.list()
self.assertIsNotNone(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(identifier="identifier")
values = {'Identifier': "identifier", }
self.holodeck.assert_has_request(Request(
'post',
'https://proxy.twilio.com/v1/Services/KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Sessions/KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants',
data=values,
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"session_sid": "KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identifier": "+14155551212",
"proxy_identifier": "+14155559999",
"proxy_identifier_sid": "PNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"date_deleted": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"date_created": "2015-07-30T20:00:00Z",
"url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"message_interactions": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/MessageInteractions"
}
}
'''
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(identifier="identifier")
self.assertIsNotNone(actual)
def test_create_channel_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"session_sid": "KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identifier": "messenger:123456",
"proxy_identifier": "messenger:987654532",
"proxy_identifier_sid": "XEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "a facebook user",
"date_deleted": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"date_created": "2015-07-30T20:00:00Z",
"url": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"links": {
"message_interactions": "https://proxy.twilio.com/v1/Services/KSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Sessions/KCaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Participants/KPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/MessageInteractions"
}
}
'''
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants.create(identifier="identifier")
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(sid="KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://proxy.twilio.com/v1/Services/KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Sessions/KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Participants/KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.proxy.v1.services(sid="KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sessions(sid="KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.participants(sid="KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
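Every test above follows the same holodeck pattern: mock a canned HTTP response, drive the resource path on the client, then assert either on the captured request or on the returned instance. Outside the test harness, the equivalent calls look roughly like the sketch below; credentials, SIDs, and the phone number are placeholders, and the attribute names follow the mocked JSON payloads.

# Hypothetical usage sketch against the real twilio-python client.
from twilio.rest import Client

client = Client("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "your_auth_token")

session = client.proxy.v1 \
    .services("KSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
    .sessions("KCXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")

participant = session.participants("KPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
print(participant.sid, participant.identifier)

new_participant = session.participants.create(identifier="+15017122661")
print(new_participant.proxy_identifier)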
| 47.837719
| 229
| 0.610709
| 734
| 10,907
| 8.948229
| 0.145777
| 0.036541
| 0.034105
| 0.040499
| 0.919762
| 0.919762
| 0.898143
| 0.890073
| 0.890073
| 0.890073
| 0
| 0.040534
| 0.28523
| 10,907
| 227
| 230
| 48.048458
| 0.80195
| 0.009994
| 0
| 0.762887
| 1
| 0.020619
| 0.279113
| 0.158375
| 0
| 0
| 0
| 0
| 0.14433
| 1
| 0.103093
| false
| 0
| 0.041237
| 0
| 0.154639
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 0aa0480e8215a283e8c5a0fbca27a9eb4b220f29
| 85,771
| py
| Python
| cloudmersive_convert_api_client/api/merge_document_api.py
| Cloudmersive/Cloudmersive.APIClient.Python.Convert
| dba2fe7257229ebdacd266531b3724552c651009
| ["Apache-2.0"]
| 3
| 2018-07-25T23:04:34.000Z
| 2021-08-10T16:43:10.000Z
| cloudmersive_convert_api_client/api/merge_document_api.py
| Cloudmersive/Cloudmersive.APIClient.Python.Convert
| dba2fe7257229ebdacd266531b3724552c651009
| ["Apache-2.0"]
| 3
| 2020-11-23T10:46:48.000Z
| 2021-12-30T14:09:34.000Z
| cloudmersive_convert_api_client/api/merge_document_api.py
| Cloudmersive/Cloudmersive.APIClient.Python.Convert
| dba2fe7257229ebdacd266531b3724552c651009
| ["Apache-2.0"]
| 2
| 2020-01-07T09:48:01.000Z
| 2020-11-23T10:47:00.000Z
|
# coding: utf-8
"""
convertapi
Convert API lets you effortlessly convert file formats and types. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cloudmersive_convert_api_client.api_client import ApiClient
class MergeDocumentApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def merge_document_docx(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Word DOCX Together # noqa: E501
Combine two Office Word Documents (docx) into one single Office Word document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_docx(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_docx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_docx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
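# Usage sketch (hypothetical; the configuration calls below follow the standard
# swagger-codegen client layout and are not part of this generated file). The
# 'Apikey' credential name matches the auth_settings used by these methods.
#
#   import cloudmersive_convert_api_client
#   conf = cloudmersive_convert_api_client.Configuration()
#   conf.api_key['Apikey'] = 'YOUR_API_KEY'
#   api = cloudmersive_convert_api_client.MergeDocumentApi(
#       cloudmersive_convert_api_client.ApiClient(conf))
#   merged = api.merge_document_docx('first.docx', 'second.docx')
#
# With async_req=True the same call returns a thread-like object immediately and
# thread.get() blocks for the merged result, as the docstrings above describe.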
def merge_document_docx_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Word DOCX Together # noqa: E501
Combine two Office Word Documents (docx) into one single Office Word document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_docx_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_docx" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_docx`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_docx`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/docx', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_docx_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Word DOCX Together # noqa: E501
Combine multiple Office Word Documents (docx) into one single Office Word document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_docx_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_docx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_docx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_docx_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Word DOCX Together # noqa: E501
Combine multiple Office Word Documents (docx) into one single Office Word document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_docx_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_docx_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_docx_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_docx_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/docx/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_html(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two HTML (HTM) Files Together # noqa: E501
Combine two HTML (.HTM) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. The title will be taken from the first document. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_html(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_html_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_html_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_html_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two HTML (HTM) Files Together # noqa: E501
Combine two HTML (.HTM) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. The title will be taken from the first document. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_html_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_html" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_html`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_html`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/html', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_html_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple HTML (HTM) Files Together # noqa: E501
Combine multiple HTML (.HTM) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. The title will be taken from the first document. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_html_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_html_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_html_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_html_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple HTML (HTM) Files Together # noqa: E501
Combine multiple HTML (.HTM) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. The title will be taken from the first document. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_html_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_html_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_html_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_html_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/html/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
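# Note on the *_multi variants (hypothetical usage; file names are placeholders):
# files beyond the two required ones are passed as keyword arguments and end up
# as the inputFile3..inputFile10 multipart form parts shown above, e.g.
#
#   merged = api.merge_document_html_multi('a.html', 'b.html', input_file3='c.html')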
def merge_document_pdf(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PDF Files Together # noqa: E501
Combine two PDF files (pdf) into a single PDF document, preserving the order of the input documents in the combined document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pdf(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_pdf_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_pdf_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_pdf_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PDF Files Together # noqa: E501
Combine two PDF files (pdf) into a single PDF document, preserving the order of the input documents in the combined document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pdf_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_pdf" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_pdf`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_pdf`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/pdf', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_pdf_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PDF Files Together # noqa: E501
Combine multiple PDF files (pdf) into a single PDF document, preserving the order of the input documents in the combined document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pdf_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_pdf_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_pdf_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_pdf_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PDF Files Together # noqa: E501
Combine multiple PDF files (pdf) into a single PDF document, preserving the order of the input documents in the combined document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pdf_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_pdf_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_pdf_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_pdf_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/pdf/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_png(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PNG Files Together # noqa: E501
Combine two PNG files into a single PNG document, preserving the order of the input documents in the combined document by stacking them vertically # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_png(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_png_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_png_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_png_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PNG Files Together # noqa: E501
Combine two PNG files into a single PNG document, preserving the order of the input documents in the combined document by stacking them vertically # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_png_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_png" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_png`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_png`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/png/vertical', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_png_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PNG Files Together # noqa: E501
Combine multiple PNG files into a single PNG document, preserving the order of the input documents in the combined document by stacking them vertically # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_png_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_png_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_png_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_png_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PNG Files Together # noqa: E501
Combine multiple PNG files into a single PNG document, preserving the order of the input documents in the combined document by stacking them vertically # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_png_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_png_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_png_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_png_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/png/vertical/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_pptx(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PowerPoint PPTX Together # noqa: E501
Combine two Office PowerPoint presentations (pptx) into one single Office PowerPoint presentation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pptx(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_pptx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_pptx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_pptx_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two PowerPoint PPTX Together # noqa: E501
Combine two Office PowerPoint presentations (pptx) into one single Office PowerPoint presentation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pptx_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_pptx" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_pptx`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_pptx`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/pptx', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_pptx_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PowerPoint PPTX Together # noqa: E501
Combine multiple Office PowerPoint presentations (pptx) into one single Office PowerPoint presentation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pptx_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_pptx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_pptx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_pptx_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple PowerPoint PPTX Together # noqa: E501
Combine multiple Office PowerPoint presentations (pptx) into one single Office PowerPoint presentation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_pptx_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_pptx_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_pptx_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_pptx_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/pptx/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
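# --- Illustrative usage (not part of the generated client) ---------------------
# A minimal sketch of calling the multi-merge endpoint above. The surrounding
# class/package names (MergeDocumentApi, ApiClient, Configuration) and the API-key
# plumbing are assumptions about the generated SDK, not confirmed by this file;
# only the 'Apikey' auth setting and the method signature come from the code above:
#
#   configuration = Configuration()
#   configuration.api_key['Apikey'] = 'YOUR-API-KEY'
#   api = MergeDocumentApi(ApiClient(configuration))
#
#   # Synchronous call; input_file3..input_file10 are optional keyword arguments.
#   merged = api.merge_document_pptx_multi('a.pptx', 'b.pptx', input_file3='c.pptx')
#
#   # Asynchronous call; a thread-like object is returned and .get() yields the result.
#   thread = api.merge_document_pptx_multi('a.pptx', 'b.pptx', async_req=True)
#   merged = thread.get()
# -------------------------------------------------------------------------------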
def merge_document_txt(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Text (TXT) Files Together # noqa: E501
Combine two Text (.TXT) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_txt(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_txt_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_txt_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_txt_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Text (TXT) Files Together # noqa: E501
Combine two Text (.TXT) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_txt_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_txt" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_txt`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_txt`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/txt', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_txt_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Text (TXT) Files Together # noqa: E501
Combine multiple Text (.TXT) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_txt_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_txt_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_txt_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_txt_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Text (TXT) Files Together # noqa: E501
Combine multiple Text (.TXT) files into a single text document, preserving the order of the input documents in the combined document by stacking them vertically. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_txt_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_txt_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_txt_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_txt_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/txt/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_xlsx(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Excel XLSX Together # noqa: E501
Combine two Office Excel spreadsheets (xlsx) into a single Office Excel spreadsheet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_xlsx(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_xlsx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_xlsx_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_xlsx_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Two Excel XLSX Together # noqa: E501
Combine two Office Excel spreadsheets (xlsx) into a single Office Excel spreadsheet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_xlsx_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on (more than 2 can be supplied). (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_xlsx" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_xlsx`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_xlsx`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/xlsx', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def merge_document_xlsx_multi(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Excel XLSX Together # noqa: E501
Combine multiple Office Excel spreadsheets (xlsx) into a single Office Excel spreadsheet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_xlsx_multi(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.merge_document_xlsx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
else:
(data) = self.merge_document_xlsx_multi_with_http_info(input_file1, input_file2, **kwargs) # noqa: E501
return data
def merge_document_xlsx_multi_with_http_info(self, input_file1, input_file2, **kwargs): # noqa: E501
"""Merge Multple Excel XLSX Together # noqa: E501
Combine multiple Office Excel spreadsheets (xlsx) into a single Office Excel spreadsheet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.merge_document_xlsx_multi_with_http_info(input_file1, input_file2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file input_file1: First input file to perform the operation on. (required)
:param file input_file2: Second input file to perform the operation on. (required)
:param file input_file3: Third input file to perform the operation on.
:param file input_file4: Fourth input file to perform the operation on.
:param file input_file5: Fifth input file to perform the operation on.
:param file input_file6: Sixth input file to perform the operation on.
:param file input_file7: Seventh input file to perform the operation on.
:param file input_file8: Eighth input file to perform the operation on.
:param file input_file9: Ninth input file to perform the operation on.
:param file input_file10: Tenth input file to perform the operation on.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['input_file1', 'input_file2', 'input_file3', 'input_file4', 'input_file5', 'input_file6', 'input_file7', 'input_file8', 'input_file9', 'input_file10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method merge_document_xlsx_multi" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'input_file1' is set
if ('input_file1' not in params or
params['input_file1'] is None):
raise ValueError("Missing the required parameter `input_file1` when calling `merge_document_xlsx_multi`") # noqa: E501
# verify the required parameter 'input_file2' is set
if ('input_file2' not in params or
params['input_file2'] is None):
raise ValueError("Missing the required parameter `input_file2` when calling `merge_document_xlsx_multi`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'input_file1' in params:
local_var_files['inputFile1'] = params['input_file1'] # noqa: E501
if 'input_file2' in params:
local_var_files['inputFile2'] = params['input_file2'] # noqa: E501
if 'input_file3' in params:
local_var_files['inputFile3'] = params['input_file3'] # noqa: E501
if 'input_file4' in params:
local_var_files['inputFile4'] = params['input_file4'] # noqa: E501
if 'input_file5' in params:
local_var_files['inputFile5'] = params['input_file5'] # noqa: E501
if 'input_file6' in params:
local_var_files['inputFile6'] = params['input_file6'] # noqa: E501
if 'input_file7' in params:
local_var_files['inputFile7'] = params['input_file7'] # noqa: E501
if 'input_file8' in params:
local_var_files['inputFile8'] = params['input_file8'] # noqa: E501
if 'input_file9' in params:
local_var_files['inputFile9'] = params['input_file9'] # noqa: E501
if 'input_file10' in params:
local_var_files['inputFile10'] = params['input_file10'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/octet-stream']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['Apikey'] # noqa: E501
return self.api_client.call_api(
'/convert/merge/xlsx/multi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
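All of the *_multi methods above share one shape: input_file1 and input_file2 are required, input_file3 through input_file10 are optional, and each value is posted as a multipart part named inputFile1..inputFile10. A small wrapper can therefore fan an arbitrary list of up to ten paths into those keyword arguments. The sketch below is illustrative only; `api` stands for an already-constructed instance of the generated class that exposes merge_document_pptx_multi:

def merge_many_pptx(api, paths):
    """Merge between 2 and 10 PPTX files via merge_document_pptx_multi (sketch)."""
    if not 2 <= len(paths) <= 10:
        raise ValueError("the multi endpoints accept between 2 and 10 input files")
    first, second, *rest = paths
    extra = {'input_file%d' % (i + 3): path for i, path in enumerate(rest)}
    return api.merge_document_pptx_multi(first, second, **extra)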
| 48.816733
| 233
| 0.643376
| 10,643
| 85,771
| 4.954994
| 0.021329
| 0.046875
| 0.0446
| 0.057342
| 0.990765
| 0.990689
| 0.990689
| 0.989248
| 0.989248
| 0.989248
| 0
| 0.028452
| 0.273053
| 85,771
| 1,756
| 234
| 48.844533
| 0.817341
| 0.398107
| 0
| 0.856389
| 1
| 0
| 0.237185
| 0.045892
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030623
| false
| 0
| 0.004224
| 0
| 0.080253
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0aaf4d1384e974bfa228df8b2ae0e2a97264e7fc
| 165
|
py
|
Python
|
src/amuse/support/data/particles.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 131
|
2015-06-04T09:06:57.000Z
|
2022-02-01T12:11:29.000Z
|
src/amuse/support/data/particles.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 690
|
2015-10-17T12:18:08.000Z
|
2022-03-31T16:15:58.000Z
|
src/amuse/support/data/particles.py
|
rieder/amuse
|
3ac3b6b8f922643657279ddee5c8ab3fc0440d5e
|
[
"Apache-2.0"
] | 102
|
2015-01-22T10:00:29.000Z
|
2022-02-09T13:29:43.000Z
|
import warnings
from amuse.datamodel.particles import *
warnings.warn("amuse.support.data.particles has moved to amuse.datamodel.particles", DeprecationWarning)
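The module above is a pure relocation shim: importing the old amuse.support.data.particles path re-exports amuse.datamodel.particles and emits a DeprecationWarning. Because DeprecationWarning is hidden by default outside __main__, callers who want to see (or test for) the notice must opt in through the standard warnings machinery. A minimal sketch, assuming amuse is installed and the module has not been imported yet in this interpreter:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always", DeprecationWarning)   # surface the relocation notice
    import amuse.support.data.particles                   # noqa: F401  (old path still works)

# Re-imports are cached and will not warn again, hence the fresh-interpreter assumption.
assert any(issubclass(w.category, DeprecationWarning) for w in caught)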
| 20.625
| 104
| 0.818182
| 20
| 165
| 6.75
| 0.65
| 0.207407
| 0.340741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09697
| 165
| 7
| 105
| 23.571429
| 0.90604
| 0
| 0
| 0
| 0
| 0
| 0.411043
| 0.325153
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ab1e4f45233efa7a67cfedf31fb59f9de0e8546
| 173
|
py
|
Python
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/one_minus_pos_canon.py
|
QiuWJX/cvxpy
|
fd1c225b0cdf541618e292cae1a4c7ea25ddc934
|
[
"ECL-2.0",
"Apache-2.0"
] | 3,285
|
2015-01-03T04:02:29.000Z
|
2021-04-19T14:51:29.000Z
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/one_minus_pos_canon.py
|
QiuWJX/cvxpy
|
fd1c225b0cdf541618e292cae1a4c7ea25ddc934
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,138
|
2015-01-01T19:40:14.000Z
|
2021-04-18T23:37:31.000Z
|
cvxpy/reductions/dgp2dcp/atom_canonicalizers/one_minus_pos_canon.py
|
phschiele/cvxpy
|
a43aed7447b87f6d0fbc6f71ae5c7b84183f3369
|
[
"ECL-2.0",
"Apache-2.0"
] | 765
|
2015-01-02T19:29:39.000Z
|
2021-04-20T00:50:43.000Z
|
from cvxpy.atoms.elementwise.exp import exp
from cvxpy.atoms.elementwise.log import log
def one_minus_pos_canon(expr, args):
return log(expr._ones - exp(args[0])), []
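In DGP, one_minus_pos(x) stands for 1 - x with 0 < x < 1; under the log-log substitution x = exp(u) used by the dgp2dcp reduction, its logarithm becomes log(1 - exp(u)), which is exactly what the canonicalizer returns (the expr._ones attribute used above plays the role of the constant 1). A stand-alone numeric check of that identity, using only the standard library:

import math

for x in (0.1, 0.5, 0.9):
    u = math.log(x)   # log-log variable: x = exp(u)
    assert math.isclose(math.log(1.0 - x), math.log(1.0 - math.exp(u)))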
| 24.714286
| 45
| 0.751445
| 28
| 173
| 4.5
| 0.607143
| 0.142857
| 0.222222
| 0.396825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006623
| 0.127168
| 173
| 6
| 46
| 28.833333
| 0.827815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
0ac117cb475c2f7356accd5e697b7d953dd9e1e4
| 124
|
py
|
Python
|
censere/actions/__init__.py
|
nhi-vanye/mars-censere
|
65678b7bc102e2adff2f78f8b3a13ba84cdf0a01
|
[
"BSD-4-Clause"
] | null | null | null |
censere/actions/__init__.py
|
nhi-vanye/mars-censere
|
65678b7bc102e2adff2f78f8b3a13ba84cdf0a01
|
[
"BSD-4-Clause"
] | null | null | null |
censere/actions/__init__.py
|
nhi-vanye/mars-censere
|
65678b7bc102e2adff2f78f8b3a13ba84cdf0a01
|
[
"BSD-4-Clause"
] | null | null | null |
#pylint: disable=unused-import
from .families import make as make_families
from .families import breakup as break_families
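The aliases exist so that call sites read as actions rather than module paths. A short sketch of what the re-exports give you, assuming the censere package laid out above is importable:

from censere.actions import make_families, break_families
from censere.actions import families

assert make_families is families.make        # same callables, friendlier names
assert break_families is families.breakup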
| 24.8
| 47
| 0.830645
| 18
| 124
| 5.611111
| 0.555556
| 0.237624
| 0.356436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 124
| 4
| 48
| 31
| 0.926606
| 0.233871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0acffc5c1321f6165faa0c2b6ae19d0aaac73641
| 4,881
|
py
|
Python
|
parsing/tests/test_basic.py
|
sprymix/parsing
|
4f9602e54e035ace5fe04bfdfd117eb89be9e865
|
[
"MIT"
] | 29
|
2015-08-06T10:16:11.000Z
|
2020-12-29T06:03:05.000Z
|
parsing/tests/test_basic.py
|
MagicStack/parsing
|
4f9602e54e035ace5fe04bfdfd117eb89be9e865
|
[
"MIT"
] | 1
|
2017-04-18T18:43:05.000Z
|
2017-04-18T18:46:59.000Z
|
parsing/tests/test_basic.py
|
MagicStack/parsing
|
4f9602e54e035ace5fe04bfdfd117eb89be9e865
|
[
"MIT"
] | 7
|
2015-06-08T07:38:37.000Z
|
2021-06-01T19:21:22.000Z
|
import unittest
import parsing
class TestParsing(unittest.TestCase):
def test_basic_a(self):
class TestParser(parsing.Lr):
def __init__(self, spec):
parsing.Lr.__init__(self, spec)
from parsing.tests.specs import a
spec = parsing.Spec(a)
parser = TestParser(spec)
parser.token(a.TokenId())
parser.token(a.TokenStar())
parser.token(a.TokenId())
parser.token(a.TokenPlus())
parser.token(a.TokenId())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
parser = TestParser(spec)
parser.token(a.TokenId())
parser.token(a.TokenPlus())
parser.token(a.TokenId())
parser.token(a.TokenStar())
parser.token(a.TokenId())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
parser = TestParser(spec)
parser.token(a.TokenId())
parser.token(a.TokenStar())
parser.token(a.TokenLparen())
parser.token(a.TokenId())
parser.token(a.TokenPlus())
parser.token(a.TokenId())
parser.token(a.TokenRparen())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
def test_basic_b(self):
class TestParser(parsing.Glr):
def __init__(self, spec):
parsing.Glr.__init__(self, spec)
from parsing.tests.specs import b
spec = parsing.Spec(b, skinny=False)
parser = TestParser(spec)
parser.token(b.id())
parser.token(b.star())
parser.token(b.id())
parser.token(b.plus())
parser.token(b.id())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
parser = TestParser(spec)
parser.token(b.id())
parser.token(b.plus())
parser.token(b.id())
parser.token(b.star())
parser.token(b.id())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[ID + [ID * ID]]")
parser = TestParser(spec)
parser.token(b.id())
parser.token(b.star())
parser.token(b.lparen())
parser.token(b.id())
parser.token(b.plus())
parser.token(b.id())
parser.token(b.rparen())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[ID * ([ID + ID])]")
def test_basic_d(self):
class TestParser(parsing.Glr):
def __init__(self, spec):
parsing.Glr.__init__(self, spec)
from parsing.tests.specs import d
spec = parsing.Spec(d, skinny=False)
parser = TestParser(spec)
parser.token(d.id())
parser.token(d.star())
parser.token(d.id())
parser.token(d.plus())
parser.token(d.id())
parser.token(d.star())
parser.token(d.id())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[[ID * ID] + [ID * ID]]")
def test_basic_h(self):
class TestGlrParser(parsing.Glr):
def __init__(self, spec):
parsing.Glr.__init__(self, spec)
from parsing.tests.specs import h
spec = parsing.Spec(h, skinny=False)
parser = TestGlrParser(spec)
parser.token(h.TokenI())
parser.token(h.TokenPlus())
parser.token(h.TokenI())
parser.token(h.TokenStar())
parser.token(h.TokenI())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(repr(parser.start[0]), "(i + (i * i))")
def test_basic_i(self):
class TestGlrParser(parsing.Glr):
def __init__(self, spec):
parsing.Glr.__init__(self, spec)
from parsing.tests.specs import i
self.assertRaises(parsing.SpecError, parsing.Spec, i)
def test_basic_pickle(self):
class TestGlrParser(parsing.Glr):
def __init__(self, spec):
parsing.Glr.__init__(self, spec)
from parsing.tests.specs import b
spec = parsing.Spec(b, skinny=False)
import pickle
specPickle = pickle.dumps(spec)
spec2 = pickle.loads(specPickle)
parser = TestGlrParser(spec2)
parser.token(b.id())
parser.token(b.star())
parser.token(b.id())
parser.token(b.plus())
parser.token(b.id())
parser.eoi()
self.assertEqual(len(parser.start), 1)
self.assertEqual(parser.start[0].val, "[[ID * ID] + ID]")
if __name__ == "__main__":
unittest.main()
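The __main__ guard above runs the whole module through unittest.main(); the same suite can also be driven programmatically, for example from another script or a CI hook. A minimal sketch, assuming the parsing package and its tests are importable:

import unittest

suite = unittest.defaultTestLoader.loadTestsFromName("parsing.tests.test_basic")
unittest.TextTestRunner(verbosity=2).run(suite)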
| 29.762195
| 72
| 0.567712
| 587
| 4,881
| 4.60477
| 0.097104
| 0.207547
| 0.097669
| 0.062153
| 0.833518
| 0.81872
| 0.813541
| 0.777654
| 0.763226
| 0.763226
| 0
| 0.0057
| 0.28109
| 4,881
| 163
| 73
| 29.944785
| 0.764605
| 0
| 0
| 0.732824
| 0
| 0
| 0.03278
| 0
| 0
| 0
| 0
| 0
| 0.145038
| 1
| 0.091603
| false
| 0
| 0.068702
| 0
| 0.21374
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ad609c00b3d1ed72c8b8e8d60bcbd8404ac20b7
| 3,547
|
py
|
Python
|
segmentron/utils/filesystem.py
|
ruijieren98/SegmenTron
|
e0973c4088be0e5c43d1959b2e0d0bfcb92a4f0e
|
[
"Apache-2.0"
] | null | null | null |
segmentron/utils/filesystem.py
|
ruijieren98/SegmenTron
|
e0973c4088be0e5c43d1959b2e0d0bfcb92a4f0e
|
[
"Apache-2.0"
] | null | null | null |
segmentron/utils/filesystem.py
|
ruijieren98/SegmenTron
|
e0973c4088be0e5c43d1959b2e0d0bfcb92a4f0e
|
[
"Apache-2.0"
] | null | null | null |
"""Filesystem utility functions."""
from __future__ import absolute_import
import os
import errno
import torch
import logging
from ..config import cfg
def save_checkpoint(model, epoch, optimizer=None, lr_scheduler=None, is_best=False):
"""Save Checkpoint"""
directory = os.path.expanduser(cfg.TRAIN.MODEL_SAVE_DIR)
directory = os.path.join(directory, '{}_{}_{}_{}'.format(cfg.MODEL.MODEL_NAME, cfg.MODEL.BACKBONE,
cfg.DATASET.NAME, cfg.TIME_STAMP))
if not os.path.exists(directory):
os.makedirs(directory)
filename = '{}.pth'.format(str(epoch))
filename = os.path.join(directory, filename)
model_state_dict = model.module.state_dict() if hasattr(model, 'module') else model.state_dict()
if is_best:
best_filename = 'best_model.pth'
best_filename = os.path.join(directory, best_filename)
torch.save(model_state_dict, best_filename)
else:
save_state = {
'epoch': epoch,
'state_dict': model_state_dict,
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict()
}
if not os.path.exists(filename):
torch.save(save_state, filename)
logging.info('Epoch {} model saved in: {}'.format(epoch, filename))
# remove last epoch
pre_filename = '{}.pth'.format(str(epoch - 1))
pre_filename = os.path.join(directory, pre_filename)
try:
if os.path.exists(pre_filename):
os.remove(pre_filename)
# nothing to remove if the previous epoch's checkpoint is already gone
except OSError as e:
logging.info(e)
def save_dp_checkpoint(model, epoch, optimizer=None, lr_scheduler=None, is_best=False):
"""Save Checkpoint"""
directory = os.path.expanduser(cfg.TRAIN.MODEL_SAVE_DIR)
directory = os.path.join(directory, '{}_{}_{}_{}'.format(cfg.MODEL.MODEL_NAME, cfg.MODEL.BACKBONE,
cfg.DATASET.NAME, cfg.TIME_STAMP))
if not os.path.exists(directory):
os.makedirs(directory)
filename = 'dp_{}.pth'.format(str(epoch))
filename = os.path.join(directory, filename)
model_state_dict = model.module.state_dict() if hasattr(model, 'module') else model.state_dict()
if is_best:
best_filename = 'dp_best_model.pth'
best_filename = os.path.join(directory, best_filename)
torch.save(model_state_dict, best_filename)
else:
save_state = {
'epoch': epoch,
'state_dict': model_state_dict,
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict()
}
if not os.path.exists(filename):
torch.save(save_state, filename)
logging.info('Epoch {} model saved in: {}'.format(epoch, filename))
# remove last epoch
pre_filename = 'dp_{}.pth'.format(str(epoch - 1))
pre_filename = os.path.join(directory, pre_filename)
try:
if os.path.exists(pre_filename):
os.remove(pre_filename)
# nothing to remove if the previous epoch's checkpoint is already gone
except OSError as e:
logging.info(e)
def makedirs(path):
"""Create directory recursively if not exists.
Similar to `mkdir -p`, you can skip checking for existence before calling this function.
Parameters
----------
path : str
Path of the desired dir
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
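save_checkpoint rotates per-epoch files: each call writes '<epoch>.pth' (or 'best_model.pth' when is_best=True) under a directory derived from cfg, then deletes the previous epoch's file. A toy sketch of driving it in a loop, placed next to the function above purely for illustration; it assumes torch is installed and that segmentron's cfg supplies usable defaults for MODEL_SAVE_DIR, MODEL_NAME, BACKBONE, DATASET.NAME and TIME_STAMP, and it deliberately skips real training:

import torch

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)

for epoch in range(3):
    lr_scheduler.step()
    # epoch N is written as '<N>.pth'; '<N-1>.pth' from the previous call is removed
    save_checkpoint(model, epoch, optimizer, lr_scheduler, is_best=(epoch == 2))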
| 37.734043
| 102
| 0.608965
| 423
| 3,547
| 4.914894
| 0.196217
| 0.046176
| 0.03848
| 0.073112
| 0.835979
| 0.835979
| 0.834055
| 0.834055
| 0.834055
| 0.834055
| 0
| 0.000776
| 0.273471
| 3,547
| 93
| 103
| 38.139785
| 0.805976
| 0.081759
| 0
| 0.708333
| 0
| 0
| 0.068912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.083333
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0af8f645e9808c1fd7f0691921108e2905ce6aa9
| 12,610
|
py
|
Python
|
openGaussBase/testcase/SQL/DDL/partition/Opengauss_Function_DDL_Partition_Case0067.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/SQL/DDL/partition/Opengauss_Function_DDL_Partition_Case0067.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/SQL/DDL/partition/Opengauss_Function_DDL_Partition_Case0067.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : Functional test
Case Name   : Verify that a foreign key in table A can reference table A's own primary key
Description :
    1. Create a database with MySQL compatibility              Expect: creation succeeds
    2. Create a table declaring the foreign-key relationship   Expect: creation succeeds
    3. Exercise the self-referencing foreign key (A -> A)      Expect: operations succeed
    4. Create a database with TD compatibility                 Expect: creation succeeds
    5. Create a table declaring the foreign-key relationship   Expect: creation succeeds
    6. Exercise the self-referencing foreign key (A -> A)      Expect: operations succeed
    7. Create a database with PG compatibility                 Expect: creation succeeds
    8. Create a table declaring the foreign-key relationship   Expect: creation succeeds
    9. Exercise the self-referencing foreign key (A -> A)      Expect: operations succeed
    10. Clean up the environment                               Expect: cleanup succeeds
Expect      :
History     :
"""
import sys
import unittest
from yat.test import Node
from yat.test import macro
sys.path.append(sys.path[0] + "/../")
from testcase.utils.CommonSH import *
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
logger = Logger()
class IndexFileDamaged(unittest.TestCase):
def setUp(self):
logger.info(
'----------------------------Opengauss_Function_DDL_Partition_Case0067 starts-----------------------------')
self.userNode = Node('dbuser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.Constant = Constant()
def test_Index_file_damaged(self):
logger.info('----------------------------Create a database with MySQL compatibility-----------------------------')
sql_cmd = '''
drop table if exists pstudent_table_05 cascade;
drop database if exists pguser;
CREATE DATABASE pguser DBCOMPATIBILITY 'B';
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_DATABASE_SUCCESS, msg)
self.assertIn(self.Constant.CREATE_DATABASE_SUCCESS, msg)
logger.info('--------------------Create a table declaring the foreign-key relationship; expect: creation succeeds--------------------')
sql_cmd = '''
drop table if exists pstudent_table_05 cascade;
create table pstudent_table_05
(
s_date timestamp primary key,
s_name varchar not null,
m_date timestamp references pstudent_table_05 (s_date) on update cascade on delete set null
)partition by range(s_date) interval ('10 day') (partition part1 values less than ('1990-02-02 00:00:00'));
insert into pstudent_table_05 values (date '2020-09-01', '张三', date '2020-09-01');
insert into pstudent_table_05 values (date '2020-09-02', '李四', date '2020-09-02');
insert into pstudent_table_05 values (date '2020-09-03', '王二', date '2020-09-03');
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_TABLE_SUCCESS, msg)
self.assertIn(self.Constant.TABLE_CREATE_SUCCESS, msg)
logger.info('----------------------------Test table A foreign key referencing table A primary key; expect: operation succeeds-----------------------------')
sql_cmd = '''
select * from pstudent_table_05;
update pstudent_table_05 set s_date = date '2020-09-09' where s_date = date '2020-09-01';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.UPDATE_SUCCESS_MSG, msg)
self.assertIn("2020-09-09", msg)
logger.info(msg)
sql_cmd = '''
delete pstudent_table_05 where s_date = date '2020-09-02';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.DELETE_SUCCESS_MSG, msg)
self.assertNotIn("2020-09-02", msg)
logger.info(msg)
logger.info('----------------------------Create a database with TD compatibility-----------------------------')
sql_cmd = '''
drop table if exists pstudent_table_05;
drop database if exists pguser;
CREATE DATABASE pguser DBCOMPATIBILITY 'C';
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_DATABASE_SUCCESS, msg)
self.assertIn(self.Constant.CREATE_DATABASE_SUCCESS, msg)
logger.info('--------------------Create a table declaring the foreign-key relationship; expect: creation succeeds--------------------')
sql_cmd = '''
drop table if exists pstudent_table_05 cascade;
create table pstudent_table_05
(
s_date timestamp primary key,
s_name varchar not null,
m_date timestamp references pstudent_table_05 (s_date) on update cascade on delete set null
)partition by range(s_date) interval ('10 day') (partition part1 values less than ('1990-02-02 00:00:00'));
insert into pstudent_table_05 values (date '2020-09-01', '张三', date '2020-09-01');
insert into pstudent_table_05 values (date '2020-09-02', '李四', date '2020-09-02');
insert into pstudent_table_05 values (date '2020-09-03', '王二', date '2020-09-03');
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_TABLE_SUCCESS, msg)
self.assertIn(self.Constant.TABLE_CREATE_SUCCESS, msg)
logger.info('----------------------------Test table A foreign key referencing table A primary key; expect: reasonable error-----------------------------')
sql_cmd = '''
select * from pstudent_table_05;
update pstudent_table_05 set s_date = date '2020-09-09' where s_date = date '2020-09-01';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.UPDATE_SUCCESS_MSG, msg)
self.assertIn("2020-09-09", msg)
logger.info(msg)
sql_cmd = '''
delete pstudent_table_05 where s_date = date '2020-09-02';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.DELETE_SUCCESS_MSG, msg)
self.assertNotIn("2020-09-02", msg)
logger.info(msg)
logger.info('----------------------------Create a database with PG compatibility-----------------------------')
sql_cmd = '''
drop table if exists pstudent_table_05;
drop database if exists pguser;
CREATE DATABASE pguser DBCOMPATIBILITY 'PG';
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_DATABASE_SUCCESS, msg)
self.assertIn(self.Constant.CREATE_DATABASE_SUCCESS, msg)
logger.info('--------------------Create a table declaring the foreign-key relationship; expect: creation succeeds--------------------')
sql_cmd = '''
drop table if exists pstudent_table_05 cascade;
create table pstudent_table_05
(
s_date timestamp primary key,
s_name varchar not null,
m_date timestamp references pstudent_table_05 (s_date) on update cascade on delete set null
)partition by range(s_date) interval ('10 day') (partition part1 values less than ('1990-02-02 00:00:00'));
insert into pstudent_table_05 values (date '2020-09-01', '张三', date '2020-09-01');
insert into pstudent_table_05 values (date '2020-09-02', '李四', date '2020-09-02');
insert into pstudent_table_05 values (date '2020-09-03', '王二', date '2020-09-03');
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertIn(self.Constant.DROP_TABLE_SUCCESS, msg)
self.assertIn(self.Constant.TABLE_CREATE_SUCCESS, msg)
logger.info('----------------------------Test table A foreign key referencing table A primary key; expect: reasonable error-----------------------------')
sql_cmd = '''
select * from pstudent_table_05;
update pstudent_table_05 set s_date = date '2020-09-09' where s_date = date '2020-09-01';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.UPDATE_SUCCESS_MSG, msg)
self.assertIn("2020-09-09", msg)
logger.info(msg)
sql_cmd = '''
delete pstudent_table_05 where s_date = date '2020-09-02';
select * from pstudent_table_05;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d pguser -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
self.assertIn(self.Constant.DELETE_SUCCESS_MSG, msg)
self.assertNotIn("2020-09-02", msg)
logger.info(msg)
def tearDown(self):
logger.info('----------------------------Drop the table and the database-----------------------------')
sql_cmd = '''
drop table if exists pstudent_table_05;
drop database if exists pguser;
'''
excute_cmd = f'''
source {self.DB_ENV_PATH} ;
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
logger.info(
'----------------------------Opengauss_Function_DDL_Partition_Case0067 finished-----------------------------')
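Every step of the case above repeats the same harness pattern: compose a SQL batch, interpolate it into a gsql invocation with an f-string, execute it on the database node, and assert on substrings of the captured output. The helper below condenses that pattern; Node, macro and the Constant message catalogue come from the surrounding yat framework and are assumed to behave exactly as in the test:

def run_sql(user_node, db_env_path, database, sql_cmd):
    """Run a SQL batch through gsql on the remote node and return its output (sketch)."""
    excute_cmd = f'''
                source {db_env_path} ;
                gsql -d {database} -p {user_node.db_port} -c "{sql_cmd}"
                '''
    return user_node.sh(excute_cmd).result()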
| 45.359712
| 127
| 0.516971
| 1,413
| 12,610
| 4.431706
| 0.135173
| 0.056052
| 0.08863
| 0.068988
| 0.828649
| 0.828649
| 0.816512
| 0.816512
| 0.816512
| 0.806771
| 0
| 0.05003
| 0.337431
| 12,610
| 278
| 128
| 45.359712
| 0.699461
| 0.040285
| 0
| 0.872247
| 0
| 0.0837
| 0.636573
| 0.106705
| 0
| 0
| 0
| 0
| 0.105727
| 1
| 0.013216
| false
| 0
| 0.030837
| 0
| 0.048458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c129bac1a74987be0caeac85f7961ef383a0c4b
| 23,361
|
py
|
Python
|
tests/test_extras_mm_fields.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 8
|
2020-03-07T19:58:40.000Z
|
2021-12-15T13:47:40.000Z
|
tests/test_extras_mm_fields.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 141
|
2020-01-17T22:47:35.000Z
|
2022-03-31T18:29:47.000Z
|
tests/test_extras_mm_fields.py
|
fyntex/lib-cl-sii-python
|
b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34
|
[
"MIT"
] | 3
|
2020-03-07T20:30:02.000Z
|
2021-03-22T03:14:26.000Z
|
from datetime import date, datetime
import unittest
import marshmallow
from cl_sii.extras.mm_fields import (
RcvPeriodoTributario, RcvPeriodoTributarioField,
RcvTipoDocto, RcvTipoDoctoField,
Rut, RutField,
TipoDteEnum, TipoDteField,
)
class RutFieldTest(unittest.TestCase):
def setUp(self) -> None:
class MyObj:
def __init__(self, emisor_rut: Rut, other_field: int = None) -> None:
self.emisor_rut = emisor_rut
self.other_field = other_field
class MyBadObj:
def __init__(self, some_field: int) -> None:
self.some_field = some_field
class MyMmSchema(marshmallow.Schema):
class Meta:
strict = False
emisor_rut = RutField(
required=True,
load_from='RUT of Emisor',
)
other_field = marshmallow.fields.Integer(
required=False,
)
class MyMmSchemaStrict(marshmallow.Schema):
class Meta:
strict = True
emisor_rut = RutField(
required=True,
load_from='RUT of Emisor',
)
other_field = marshmallow.fields.Integer(
required=False,
)
self.MyObj = MyObj
self.MyBadObj = MyBadObj
self.MyMmSchema = MyMmSchema
self.MyMmSchemaStrict = MyMmSchemaStrict
def test_load_ok_valid(self) -> None:
schema = self.MyMmSchema()
data_valid_1 = {'RUT of Emisor': '1-1'}
data_valid_2 = {'RUT of Emisor': Rut('1-1')}
data_valid_3 = {'RUT of Emisor': ' 1.111.111-k \t '}
result = schema.load(data_valid_1)
self.assertDictEqual(dict(result.data), {'emisor_rut': Rut('1-1')})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_2)
self.assertDictEqual(dict(result.data), {'emisor_rut': Rut('1-1')})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_3)
self.assertDictEqual(dict(result.data), {'emisor_rut': Rut('1111111-K')})
self.assertDictEqual(dict(result.errors), {})
def test_dump_ok_valid(self) -> None:
schema = self.MyMmSchema()
obj_valid_1 = self.MyObj(emisor_rut=Rut('1-1'))
obj_valid_2 = self.MyObj(emisor_rut=None)
data, errors = schema.dump(obj_valid_1)
self.assertDictEqual(data, {'emisor_rut': '1-1', 'other_field': None})
self.assertDictEqual(errors, {})
data, errors = schema.dump(obj_valid_2)
self.assertDictEqual(data, {'emisor_rut': None, 'other_field': None})
self.assertDictEqual(errors, {})
def test_dump_ok_strange(self) -> None:
# If the class of the object to be dumped has attributes that do not match at all the
# fields of the schema, there are no errors! Even if the schema has `strict = True` set.
schema = self.MyMmSchema()
schema_strict = self.MyMmSchemaStrict()
obj_valid_1 = self.MyBadObj(some_field=123)
obj_valid_2 = self.MyBadObj(some_field=None)
data, errors = schema.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
def test_load_fail(self) -> None:
schema = self.MyMmSchema()
data_invalid_1 = {'RUT of Emisor': '123123123123'}
data_invalid_2 = {'RUT of Emisor': 123}
data_invalid_3 = {'RUT of Emisor': None}
data_invalid_4 = {}
result = schema.load(data_invalid_1)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'RUT of Emisor': ['Not a syntactically valid RUT.']}) # noqa: E501
result = schema.load(data_invalid_2)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'RUT of Emisor': ['Invalid input type.']})
result = schema.load(data_invalid_3)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'RUT of Emisor': ['Field may not be null.']})
result = schema.load(data_invalid_4)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'RUT of Emisor': ['Missing data for required field.']}) # noqa: E501
def test_dump_fail(self) -> None:
schema = self.MyMmSchema()
obj_invalid_1 = self.MyObj(emisor_rut=20)
obj_invalid_2 = self.MyObj(emisor_rut='123123123123')
obj_invalid_3 = self.MyObj(emisor_rut='')
data, errors = schema.dump(obj_invalid_1)
self.assertDictEqual(errors, {'emisor_rut': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_2)
self.assertDictEqual(errors, {'emisor_rut': ['Not a syntactically valid RUT.']})
data, errors = schema.dump(obj_invalid_3)
self.assertDictEqual(errors, {'emisor_rut': ['Not a syntactically valid RUT.']})
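# --- Illustrative standalone use of RutField (not part of the test suite) --------
# The assertions above exercise the marshmallow 2.x style API (load_from plus an
# UnmarshalResult exposing .data/.errors). Outside unittest the field is used the
# same way; a sketch, assuming cl_sii and marshmallow 2.x are installed:
#
#   import marshmallow
#   from cl_sii.extras.mm_fields import Rut, RutField
#
#   class EmisorSchema(marshmallow.Schema):
#       emisor_rut = RutField(required=True, load_from='RUT of Emisor')
#
#   result = EmisorSchema().load({'RUT of Emisor': ' 1.111.111-k \t '})
#   # result.data == {'emisor_rut': Rut('1111111-K')}; result.errors == {}
# ----------------------------------------------------------------------------------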
class TipoDteFieldTest(unittest.TestCase):
def setUp(self) -> None:
class MyObj:
def __init__(self, tipo_dte: TipoDteEnum, other_field: int = None) -> None:
self.tipo_dte = tipo_dte
self.other_field = other_field
class MyBadObj:
def __init__(self, some_field: int) -> None:
self.some_field = some_field
class MyMmSchema(marshmallow.Schema):
class Meta:
strict = False
tipo_dte = TipoDteField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
class MyMmSchemaStrict(marshmallow.Schema):
class Meta:
strict = True
tipo_dte = TipoDteField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
self.MyObj = MyObj
self.MyBadObj = MyBadObj
self.MyMmSchema = MyMmSchema
self.MyMmSchemaStrict = MyMmSchemaStrict
def test_load_ok_valid(self) -> None:
schema = self.MyMmSchema()
data_valid_1 = {'source field name': 33}
data_valid_2 = {'source field name': TipoDteEnum(33)}
data_valid_3 = {'source field name': ' 33 \t '}
result = schema.load(data_valid_1)
self.assertDictEqual(dict(result.data), {'tipo_dte': TipoDteEnum(33)})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_2)
self.assertDictEqual(dict(result.data), {'tipo_dte': TipoDteEnum(33)})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_3)
self.assertDictEqual(dict(result.data), {'tipo_dte': TipoDteEnum(33)})
self.assertDictEqual(dict(result.errors), {})
def test_dump_ok_valid(self) -> None:
schema = self.MyMmSchema()
obj_valid_1 = self.MyObj(tipo_dte=TipoDteEnum(33))
obj_valid_2 = self.MyObj(tipo_dte=None)
data, errors = schema.dump(obj_valid_1)
self.assertDictEqual(data, {'tipo_dte': 33, 'other_field': None})
self.assertDictEqual(errors, {})
data, errors = schema.dump(obj_valid_2)
self.assertDictEqual(data, {'tipo_dte': None, 'other_field': None})
self.assertDictEqual(errors, {})
def test_dump_ok_strange(self) -> None:
# If the class of the object to be dumped has attributes that do not match at all the
# fields of the schema, there are no errors! Even if the schema has `strict = True` set.
schema = self.MyMmSchema()
schema_strict = self.MyMmSchemaStrict()
obj_valid_1 = self.MyBadObj(some_field=123)
obj_valid_2 = self.MyBadObj(some_field=None)
data, errors = schema.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
def test_load_fail(self) -> None:
schema = self.MyMmSchema()
data_invalid_1 = {'source field name': '123'}
data_invalid_2 = {'source field name': True}
data_invalid_3 = {'source field name': None}
data_invalid_4 = {}
result = schema.load(data_invalid_1)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Not a valid Tipo DTE.']})
result = schema.load(data_invalid_2)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Invalid input type.']})
result = schema.load(data_invalid_3)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Field may not be null.']})
result = schema.load(data_invalid_4)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Missing data for required field.']}) # noqa: E501
def test_dump_fail(self) -> None:
schema = self.MyMmSchema()
obj_invalid_1 = self.MyObj(tipo_dte=100)
obj_invalid_2 = self.MyObj(tipo_dte=True)
obj_invalid_3 = self.MyObj(tipo_dte='FACTURA_ELECTRONICA')
obj_invalid_4 = self.MyObj(tipo_dte='')
obj_invalid_5 = self.MyObj(tipo_dte=date(2018, 12, 23))
data, errors = schema.dump(obj_invalid_1)
self.assertDictEqual(errors, {'tipo_dte': ['Not a valid Tipo DTE.']})
data, errors = schema.dump(obj_invalid_2)
self.assertDictEqual(errors, {'tipo_dte': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_3)
self.assertDictEqual(errors, {'tipo_dte': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_4)
self.assertDictEqual(errors, {'tipo_dte': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_5)
self.assertDictEqual(errors, {'tipo_dte': ['Invalid input type.']})
class RcvTipoDoctoFieldTest(unittest.TestCase):
def setUp(self) -> None:
class MyObj:
def __init__(self, tipo_docto: RcvTipoDocto, other_field: int = None) -> None:
self.tipo_docto = tipo_docto
self.other_field = other_field
class MyBadObj:
def __init__(self, some_field: int) -> None:
self.some_field = some_field
class MyMmSchema(marshmallow.Schema):
class Meta:
strict = False
tipo_docto = RcvTipoDoctoField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
class MyMmSchemaStrict(marshmallow.Schema):
class Meta:
strict = True
tipo_docto = RcvTipoDoctoField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
self.MyObj = MyObj
self.MyBadObj = MyBadObj
self.MyMmSchema = MyMmSchema
self.MyMmSchemaStrict = MyMmSchemaStrict
def test_load_ok_valid(self) -> None:
schema = self.MyMmSchema()
data_valid_1 = {'source field name': 33}
data_valid_2 = {'source field name': RcvTipoDocto(33)}
data_valid_3 = {'source field name': ' 33 \t '}
result = schema.load(data_valid_1)
self.assertDictEqual(dict(result.data), {'tipo_docto': RcvTipoDocto(33)})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_2)
self.assertDictEqual(dict(result.data), {'tipo_docto': RcvTipoDocto(33)})
self.assertDictEqual(dict(result.errors), {})
result = schema.load(data_valid_3)
self.assertDictEqual(dict(result.data), {'tipo_docto': RcvTipoDocto(33)})
self.assertDictEqual(dict(result.errors), {})
def test_dump_ok_valid(self) -> None:
schema = self.MyMmSchema()
obj_valid_1 = self.MyObj(tipo_docto=RcvTipoDocto(33))
obj_valid_2 = self.MyObj(tipo_docto=None)
data, errors = schema.dump(obj_valid_1)
self.assertDictEqual(data, {'tipo_docto': 33, 'other_field': None})
self.assertDictEqual(errors, {})
data, errors = schema.dump(obj_valid_2)
self.assertDictEqual(data, {'tipo_docto': None, 'other_field': None})
self.assertDictEqual(errors, {})
def test_dump_ok_strange(self) -> None:
# If the class of the object being dumped has attributes that do not match any of the
# schema's fields, there are no errors! This holds even if the schema has `strict = True` set.
schema = self.MyMmSchema()
schema_strict = self.MyMmSchemaStrict()
obj_valid_1 = self.MyBadObj(some_field=123)
obj_valid_2 = self.MyBadObj(some_field=None)
data, errors = schema.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
def test_load_fail(self) -> None:
schema = self.MyMmSchema()
data_invalid_1 = {'source field name': '123'}
data_invalid_2 = {'source field name': True}
data_invalid_3 = {'source field name': None}
data_invalid_4 = {}
result = schema.load(data_invalid_1)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ["Not a valid RCV's Tipo de Documento."]}) # noqa: E501
result = schema.load(data_invalid_2)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Invalid input type.']})
result = schema.load(data_invalid_3)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Field may not be null.']})
result = schema.load(data_invalid_4)
self.assertDictEqual(dict(result.data), {})
self.assertDictEqual(dict(result.errors), {'source field name': ['Missing data for required field.']}) # noqa: E501
def test_dump_fail(self) -> None:
schema = self.MyMmSchema()
obj_invalid_1 = self.MyObj(tipo_docto=100)
obj_invalid_2 = self.MyObj(tipo_docto=True)
obj_invalid_3 = self.MyObj(tipo_docto='FACTURA_ELECTRONICA')
obj_invalid_4 = self.MyObj(tipo_docto='')
obj_invalid_5 = self.MyObj(tipo_docto=date(2018, 12, 23))
data, errors = schema.dump(obj_invalid_1)
self.assertDictEqual(errors, {'tipo_docto': ["Not a valid RCV's Tipo de Documento."]})
data, errors = schema.dump(obj_invalid_2)
self.assertDictEqual(errors, {'tipo_docto': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_3)
self.assertDictEqual(errors, {'tipo_docto': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_4)
self.assertDictEqual(errors, {'tipo_docto': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_5)
self.assertDictEqual(errors, {'tipo_docto': ['Invalid input type.']})
class RcvPeriodoTributarioFieldTest(unittest.TestCase):
def setUp(self) -> None:
class MyObj:
def __init__(
self,
periodo_tributario: RcvPeriodoTributario,
other_field: int = None,
) -> None:
self.periodo_tributario = periodo_tributario
self.other_field = other_field
class MyBadObj:
def __init__(self, some_field: int) -> None:
self.some_field = some_field
class MyMmSchema(marshmallow.Schema):
class Meta:
strict = False
periodo_tributario = RcvPeriodoTributarioField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
class MyMmSchemaStrict(marshmallow.Schema):
class Meta:
strict = True
periodo_tributario = RcvPeriodoTributarioField(
required=True,
load_from='source field name',
)
other_field = marshmallow.fields.Integer(
required=False,
)
self.MyObj = MyObj
self.MyBadObj = MyBadObj
self.MyMmSchema = MyMmSchema
self.MyMmSchemaStrict = MyMmSchemaStrict
def test_load_ok_valid(self) -> None:
schema = self.MyMmSchema()
data_valid_1 = {'source field name': '2019-12'}
data_valid_2 = {'source field name': RcvPeriodoTributario(year=2019, month=12)}
data_valid_3 = {'source field name': '2019-09'}
data_valid_4 = {'source field name': '2019-9'}
result = schema.load(data_valid_1)
self.assertEqual(
dict(result.data),
{'periodo_tributario': RcvPeriodoTributario(year=2019, month=12)},
)
self.assertEqual(dict(result.errors), {})
result = schema.load(data_valid_2)
self.assertEqual(
dict(result.data),
{'periodo_tributario': RcvPeriodoTributario(year=2019, month=12)},
)
self.assertEqual(dict(result.errors), {})
result = schema.load(data_valid_3)
self.assertEqual(
dict(result.data),
{'periodo_tributario': RcvPeriodoTributario(year=2019, month=9)},
)
self.assertEqual(dict(result.errors), {})
result = schema.load(data_valid_4)
self.assertEqual(
dict(result.data),
{'periodo_tributario': RcvPeriodoTributario(year=2019, month=9)},
)
self.assertEqual(dict(result.errors), {})
def test_dump_ok_valid(self) -> None:
schema = self.MyMmSchema()
obj_valid_1 = self.MyObj(periodo_tributario=RcvPeriodoTributario(year=2019, month=12))
obj_valid_2 = self.MyObj(periodo_tributario=RcvPeriodoTributario(year=2019, month=9))
obj_valid_3 = self.MyObj(periodo_tributario=None)
data, errors = schema.dump(obj_valid_1)
self.assertEqual(data, {'periodo_tributario': '2019-12', 'other_field': None})
self.assertEqual(errors, {})
data, errors = schema.dump(obj_valid_2)
self.assertEqual(data, {'periodo_tributario': '2019-09', 'other_field': None})
self.assertEqual(errors, {})
data, errors = schema.dump(obj_valid_3)
self.assertEqual(data, {'periodo_tributario': None, 'other_field': None})
self.assertEqual(errors, {})
def test_dump_ok_strange(self) -> None:
# If the class of the object being dumped has attributes that do not match any of the
# schema's fields, there are no errors! This holds even if the schema has `strict = True` set.
schema = self.MyMmSchema()
schema_strict = self.MyMmSchemaStrict()
obj_valid_1 = self.MyBadObj(some_field=123)
obj_valid_2 = self.MyBadObj(some_field=None)
data, errors = schema.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_1)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
data, errors = schema_strict.dump(obj_valid_2)
self.assertEqual((data, errors), ({}, {}))
def test_load_fail(self) -> None:
schema = self.MyMmSchema()
data_invalid_1 = {'source field name': '2019-12-01'}
data_invalid_2 = {'source field name': 201912}
data_invalid_3 = {'source field name': ''}
data_invalid_4 = {'source field name': None}
data_invalid_5 = {}
result = schema.load(data_invalid_1)
self.assertEqual(dict(result.data), {})
self.assertEqual(
dict(result.errors),
{'source field name': ["Not a valid RCV Periodo Tributario."]},
)
result = schema.load(data_invalid_2)
self.assertEqual(dict(result.data), {})
self.assertEqual(dict(result.errors), {'source field name': ['Invalid input type.']})
result = schema.load(data_invalid_3)
self.assertEqual(dict(result.data), {})
self.assertEqual(
dict(result.errors),
{'source field name': ["Not a valid RCV Periodo Tributario."]},
)
result = schema.load(data_invalid_4)
self.assertEqual(dict(result.data), {})
self.assertEqual(dict(result.errors), {'source field name': ['Field may not be null.']})
result = schema.load(data_invalid_5)
self.assertEqual(dict(result.data), {})
self.assertEqual(
dict(result.errors),
{'source field name': ['Missing data for required field.']},
)
def test_dump_fail(self) -> None:
schema = self.MyMmSchema()
obj_invalid_1 = self.MyObj(periodo_tributario='2019-12-01')
obj_invalid_2 = self.MyObj(periodo_tributario=date(2019, 12, 1))
obj_invalid_3 = self.MyObj(periodo_tributario=datetime(2019, 12, 1, 22, 33))
obj_invalid_4 = self.MyObj(periodo_tributario='')
obj_invalid_5 = self.MyObj(periodo_tributario=201912)
obj_invalid_6 = self.MyObj(periodo_tributario=' 2019-12-01')
data, errors = schema.dump(obj_invalid_1)
self.assertEqual(errors, {'periodo_tributario': ["Not a valid RCV Periodo Tributario."]})
data, errors = schema.dump(obj_invalid_2)
self.assertEqual(errors, {'periodo_tributario': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_3)
self.assertEqual(errors, {'periodo_tributario': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_4)
self.assertEqual(errors, {'periodo_tributario': ["Not a valid RCV Periodo Tributario."]})
data, errors = schema.dump(obj_invalid_5)
self.assertEqual(errors, {'periodo_tributario': ['Invalid input type.']})
data, errors = schema.dump(obj_invalid_6)
self.assertEqual(errors, {'periodo_tributario': ["Not a valid RCV Periodo Tributario."]})
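The test classes above exercise custom marshmallow fields such as `RcvTipoDoctoField`, but the field implementations themselves are not part of this file. As a rough illustration only (an assumption, not the project's actual code), an enum-backed field compatible with the marshmallow 2.x API used here (`Field.fail`, `Meta.strict`, `(data, errors)` results) might look like the following sketch, with a hypothetical stand-in `RcvTipoDocto` enum:
# --- illustrative sketch only; not part of the tested project -------------------------------
import enum
import marshmallow
import marshmallow.fields
class RcvTipoDocto(enum.IntEnum):
    # hypothetical stand-in for the real enum referenced by the tests
    FACTURA_ELECTRONICA = 33
class RcvTipoDoctoField(marshmallow.fields.Field):
    """Enum-backed field: loads ints/strings into RcvTipoDocto, dumps back to plain ints."""
    default_error_messages = {
        'invalid': "Not a valid RCV's Tipo de Documento.",
        'type': 'Invalid input type.',
    }
    def _deserialize(self, value, attr, data):
        if isinstance(value, RcvTipoDocto):
            return value
        if isinstance(value, bool) or not isinstance(value, (int, str)):
            self.fail('type')
        try:
            # tolerate surrounding whitespace, as in the ' 33 \t ' test case
            return RcvTipoDocto(int(str(value).strip()))
        except ValueError:
            self.fail('invalid')
    def _serialize(self, value, attr, obj):
        if value is None:
            return None
        if isinstance(value, bool) or not isinstance(value, (RcvTipoDocto, int)):
            self.fail('type')
        try:
            return int(RcvTipoDocto(value))
        except ValueError:
            self.fail('invalid')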
| 37.317891
| 128
| 0.615813
| 2,678
| 23,361
| 5.175504
| 0.048544
| 0.091847
| 0.050794
| 0.087879
| 0.939033
| 0.906205
| 0.883405
| 0.859091
| 0.835859
| 0.834704
| 0
| 0.023461
| 0.262874
| 23,361
| 625
| 129
| 37.3776
| 0.781417
| 0.031934
| 0
| 0.742981
| 0
| 0
| 0.110792
| 0
| 0
| 0
| 0
| 0
| 0.24406
| 1
| 0.069114
| false
| 0
| 0.008639
| 0
| 0.138229
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c1501f3597edc79348f3fb3f93547e3e5a1eb99
| 22,849
|
py
|
Python
|
example/migrations/0001_initial.py
|
sasriawesome/django_trumbo
|
28372409837a9e97158428e3beb1ed1c74e8860c
|
[
"MIT"
] | 2
|
2020-05-24T00:40:42.000Z
|
2021-05-31T02:27:41.000Z
|
example/migrations/0001_initial.py
|
sasriawesome/django_trumbo
|
28372409837a9e97158428e3beb1ed1c74e8860c
|
[
"MIT"
] | null | null | null |
example/migrations/0001_initial.py
|
sasriawesome/django_trumbo
|
28372409837a9e97158428e3beb1ed1c74e8860c
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-05-20 17:06
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('pid', models.CharField(blank=True, help_text='Personal Identifier Number', max_length=256, null=True, verbose_name='PID')),
('gender', models.CharField(choices=[('L', 'Male'), ('P', 'Female')], default='L', max_length=1, verbose_name='gender')),
('date_of_birth', models.DateField(blank=True, default=django.utils.timezone.now, null=True, verbose_name='date of birth')),
('place_of_birth', models.CharField(blank=True, max_length=255, null=True, verbose_name='place of birth')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('nickname', models.CharField(blank=True, max_length=256, null=True, verbose_name='nick name')),
('about_me', models.TextField(blank=True, max_length=128, null=True, verbose_name='about me')),
('religion', models.CharField(blank=True, max_length=255, null=True, verbose_name='religion')),
('nation', models.CharField(blank=True, max_length=255, null=True, verbose_name='nation')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='person_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'verbose_name': 'Person',
'verbose_name_plural': 'Persons',
},
),
migrations.CreateModel(
name='Working',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('name', models.CharField(max_length=50, verbose_name='name')),
('institution', models.CharField(max_length=256, verbose_name='institution')),
('date_start', models.DateField(default=django.utils.timezone.now, verbose_name='date start')),
('date_end', models.DateField(default=django.utils.timezone.now, verbose_name='date end')),
('document_link', models.URLField(blank=True, help_text='Provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('department', models.CharField(max_length=256, verbose_name='department')),
('position', models.CharField(max_length=256, verbose_name='position')),
('description', models.TextField(blank=True, max_length=256, null=True, verbose_name='description')),
('employment', models.CharField(choices=[('CTR', 'Contract'), ('FXD', 'Fixed'), ('OSR', 'Outsource'), ('ELS', 'Else')], default='CTR', max_length=5, verbose_name='employment')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_histories', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='working_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Volunteer',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('organization', models.CharField(max_length=256, verbose_name='organization')),
('position', models.CharField(max_length=256, verbose_name='position')),
('description', models.TextField(max_length=256, verbose_name='description')),
('date_start', models.DateField(default=django.utils.timezone.now, verbose_name='date start')),
('date_end', models.DateField(default=django.utils.timezone.now, verbose_name='date end')),
('status', models.CharField(choices=[('ACT', 'Active'), ('INC', 'Inactive')], default='ACT', max_length=5, verbose_name='status')),
('document_link', models.URLField(blank=True, help_text='Provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='volunteers', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='volunteer_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='SocialMedia',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('facebook', models.SlugField(blank=True, help_text='Facebook page or name', null=True)),
('twitter', models.SlugField(blank=True, help_text='Twitter username, without the @', max_length=255, null=True)),
('instagram', models.SlugField(blank=True, help_text='Instagram username, without the @', max_length=255, null=True)),
('youtube', models.SlugField(blank=True, help_text='Youtube channel name.', null=True)),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='social_media', to='example.Person')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Skill',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('name', models.CharField(max_length=256, verbose_name='name')),
('description', models.CharField(blank=True, max_length=256, null=True, verbose_name='description')),
('level', models.IntegerField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(10)], verbose_name='Skill level')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='skills', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='skill_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Publication',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('title', models.CharField(max_length=256, verbose_name='title')),
('description', models.CharField(blank=True, max_length=256, null=True, verbose_name='description')),
('publisher', models.CharField(blank=True, max_length=256, null=True, verbose_name='publisher')),
('date_published', models.DateField(blank=True, default=django.utils.timezone.now, null=True, verbose_name='published date')),
('document_link', models.URLField(blank=True, help_text='provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='publications', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publication_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PersonContact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phone', models.CharField(blank=True, max_length=128, null=True, verbose_name='phone')),
('fax', models.CharField(blank=True, max_length=128, null=True, verbose_name='fax')),
('email', models.CharField(blank=True, help_text='your public email', max_length=128, null=True, verbose_name='email')),
('whatsapp', models.CharField(blank=True, max_length=128, null=True, verbose_name='whatsapp')),
('website', models.CharField(blank=True, max_length=128, null=True, verbose_name='website')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='contact', to='example.Person')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PersonAddress',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_primary', models.BooleanField(default=True, verbose_name='primary')),
('name', models.CharField(choices=[('home', 'Home'), ('office', 'Office')], default='home', help_text='E.g. Home Address or Office Address', max_length=256, null=True, verbose_name='name')),
('street', models.CharField(blank=True, max_length=512, null=True, verbose_name='street')),
('city', models.CharField(blank=True, max_length=128, null=True, verbose_name='city')),
('province', models.CharField(blank=True, max_length=128, null=True, verbose_name='province')),
('country', models.CharField(blank=True, max_length=128, null=True, verbose_name='country')),
('zipcode', models.CharField(blank=True, max_length=128, null=True, verbose_name='zip code')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='example.Person')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='NonFormalEducation',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('name', models.CharField(max_length=50, verbose_name='name')),
('institution', models.CharField(max_length=256, verbose_name='institution')),
('date_start', models.DateField(default=django.utils.timezone.now, verbose_name='date start')),
('date_end', models.DateField(default=django.utils.timezone.now, verbose_name='date end')),
('document_link', models.URLField(blank=True, help_text='Provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('description', models.CharField(blank=True, max_length=256, null=True, verbose_name='description')),
('status', models.CharField(choices=[('FNS', 'Finished'), ('ONG', 'Ongoing'), ('UNF', 'Unfinished')], default='ONG', max_length=5, verbose_name='current status')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='non_formal_educations', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='nonformaleducation_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='FormalEducation',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('institution', models.CharField(max_length=256, verbose_name='institution')),
('date_start', models.DateField(default=django.utils.timezone.now, verbose_name='date start')),
('date_end', models.DateField(default=django.utils.timezone.now, verbose_name='date end')),
('document_link', models.URLField(blank=True, help_text='Provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('major', models.CharField(blank=True, help_text='ex: Information System or Accounting', max_length=256, null=True, verbose_name='major')),
('status', models.CharField(choices=[('FNS', 'Finished'), ('ONG', 'Ongoing'), ('UNF', 'Unfinished')], default='ONG', max_length=5, verbose_name='current status')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='formal_educations', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='formaleducation_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Family',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('relation', models.PositiveIntegerField(choices=[(1, 'Father'), (2, 'Mother'), (5, 'Husband'), (6, 'Wife'), (4, 'Children'), (3, 'Sibling'), (99, 'Other')], default=99, verbose_name='relation')),
('relationship', models.CharField(blank=True, max_length=256, null=True, verbose_name='other relation')),
('name', models.CharField(max_length=256, verbose_name='name')),
('date_of_birth', models.DateField(blank=True, default=django.utils.timezone.now, null=True, verbose_name='date of birth')),
('place_of_birth', models.CharField(blank=True, max_length=255, null=True, verbose_name='place of birth')),
('job', models.CharField(max_length=256, verbose_name='job')),
('address', models.TextField(blank=True, max_length=512, null=True, verbose_name='address')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='families', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='family_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Award',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('is_trash', models.BooleanField(default=False, editable=False, verbose_name='trash')),
('trashed_at', models.DateTimeField(blank=True, editable=False, null=True)),
('name', models.CharField(max_length=256, verbose_name='name')),
('description', models.TextField(blank=True, max_length=256, null=True, verbose_name='description')),
('date', models.DateField(blank=True, default=django.utils.timezone.now, null=True, verbose_name='created date')),
('document_link', models.URLField(blank=True, help_text='Provide support document link', null=True, verbose_name='document link')),
('privacy', models.CharField(choices=[('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'), ('students', 'All Students'), ('teachers', 'All Teachers'), ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me')], default='anyone', help_text='Designates who can see this information.', max_length=128, verbose_name='privacy')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='awards', to='example.Person')),
('trashed_by', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='award_trashes', to=settings.AUTH_USER_MODEL, verbose_name='trashed by')),
],
options={
'abstract': False,
},
),
]
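Most models in this auto-generated migration repeat the same `id`/`is_trash`/`trashed_at`/`trashed_by` columns, and all of them share the same `privacy` choice field, which suggests shared abstract base models in the app. A minimal sketch of what such bases might look like follows; this is an assumption for illustration, the actual django_trumbo model code is not shown here.
# Hypothetical abstract bases that would yield the repeated columns above (illustration only).
import uuid
from django.conf import settings
from django.db import models
class TrashableUUIDModel(models.Model):
    id = models.UUIDField('uuid', default=uuid.uuid4, editable=False, primary_key=True)
    is_trash = models.BooleanField('trash', default=False, editable=False)
    trashed_at = models.DateTimeField(blank=True, editable=False, null=True)
    trashed_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, blank=True, editable=False, null=True,
        on_delete=models.CASCADE, related_name='%(class)s_trashes',
        verbose_name='trashed by',
    )
    class Meta:
        abstract = True
PRIVACY_CHOICES = [
    ('anyone', 'Anyone'), ('users', 'All Users'), ('friends', 'All Friends'),
    ('students', 'All Students'), ('teachers', 'All Teachers'),
    ('employees', 'All Employees'), ('managers', 'All Managers'), ('me', 'Only Me'),
]
class PrivacyModel(models.Model):
    privacy = models.CharField(
        'privacy', max_length=128, choices=PRIVACY_CHOICES, default='anyone',
        help_text='Designates who can see this information.',
    )
    class Meta:
        abstract = True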
| 90.670635
| 383
| 0.632063
| 2,506
| 22,849
| 5.626496
| 0.094972
| 0.087376
| 0.040426
| 0.049858
| 0.869858
| 0.865248
| 0.84617
| 0.826596
| 0.821206
| 0.796667
| 0
| 0.011358
| 0.198521
| 22,849
| 251
| 384
| 91.031873
| 0.758587
| 0.001969
| 0
| 0.602459
| 1
| 0
| 0.235155
| 0.00307
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02459
| 0
| 0.040984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c1709ad80495d11c66f5c0f4c663b58139d1135
| 1,747
|
py
|
Python
|
20200303_萌新赛-劝退警告_常规套娃/hx.py
|
ctfwiki/subject_misc_ctfshow
|
2a51f5bc12e9c136841f9cc3ef88ed53df952054
|
[
"MIT"
] | 16
|
2020-09-26T06:17:57.000Z
|
2022-03-03T15:41:07.000Z
|
20200303_萌新赛-劝退警告_常规套娃/hx.py
|
ctfwiki/subject_misc_ctfshow
|
2a51f5bc12e9c136841f9cc3ef88ed53df952054
|
[
"MIT"
] | null | null | null |
20200303_萌新赛-劝退警告_常规套娃/hx.py
|
ctfwiki/subject_misc_ctfshow
|
2a51f5bc12e9c136841f9cc3ef88ed53df952054
|
[
"MIT"
] | 6
|
2020-08-30T09:09:08.000Z
|
2021-11-28T02:09:39.000Z
|
(lambda __print, __g, __y, __operator: [[[[[[(check(0), None)[1] for __g['check'], check.__name__ in [(lambda x: (lambda __l: [[(lambda __sentinel, __after, __items: __y(lambda __this: lambda: (lambda __i: [[(lambda __sentinel, __after, __items: __y(lambda __this: lambda: (lambda __i: [[__this() for __l['n'] in [(__operator.iadd(__l['n'], decode((arr[int(__l['i'])] + (decode((arr[11] + (chr(61) * 2))) * 2)))))]][0] for __l['i'] in [(__i)]][0] if __i is not __sentinel else __after())(next(__items, __sentinel)))())([], lambda: [__this() for __l['v'] in [(__operator.iadd(__l['v'], chr(int(__l['n']))))]][0], iter(__l['c'])) for __l['n'] in [('')]][0] for __l['c'] in [(__i)]][0] if __i is not __sentinel else __after())(next(__items, __sentinel)))())([], lambda: (lambda __after: (__print(__l['v']), __after())[1] if (__l['x'] == 1) else __after())(lambda: None), iter(flag.split(decode((arr[10] + (decode((arr[11] + (chr(61) * 2))) * 2)))))) for __l['v'] in [('')]][0] for __l['x'] in [(x)]][0])({}), 'check')]][0] for __g['decode'], decode.__name__ in [(lambda s: (lambda __l: [base64.b64decode(__l['s'].encode()).decode() for __l['s'] in [(s)]][0])({}), 'decode')]][0] for __g['encode'], encode.__name__ in [(lambda s: (lambda __l: [base64.b64encode(__l['s'].encode()).decode() for __l['s'] in [(s)]][0])({}), 'encode')]][0] for __g['flag'] in [('136-139-78-132-162-89-49-117-70-161-49-118-70-02-01-01-70-137-01-160')]][0] for __g['arr'] in [(['NQ', 'MQ', 'Mw', 'MA', 'NA', 'Ng', 'Mg', 'OQ', 'Nw', 'OA', 'LQ', 'PQ'])]][0] for __g['base64'] in [(__import__('base64', __g, __g))]][0])(__import__('__builtin__', level=0).__dict__['print'], globals(), (lambda f: (lambda x: x(x))(lambda y: f(lambda: y(y)()))), __import__('operator', level=0))
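For readability, here is a hand-deobfuscated sketch of what the one-liner above appears to do (my reading of it, so treat the details as an assumption): it base64-decodes one-character fragments from `arr`, maps each digit of every dash-separated group in `flag` through that table, converts the resulting number to a character, and only prints the assembled string when `check` is called with `1`; the original calls `check(0)`, so it prints nothing.
# Readable reconstruction of the obfuscated logic above (assumed equivalent, not verified byte-for-byte).
import base64
def decode(s: str) -> str:
    return base64.b64decode(s.encode()).decode()
arr = ['NQ', 'MQ', 'Mw', 'MA', 'NA', 'Ng', 'Mg', 'OQ', 'Nw', 'OA', 'LQ', 'PQ']
flag = '136-139-78-132-162-89-49-117-70-161-49-118-70-02-01-01-70-137-01-160'
pad = decode(arr[11] + chr(61) * 2) * 2   # arr[11] decodes to '=', so pad == '=='
sep = decode(arr[10] + pad)               # arr[10] decodes to '-', the group separator
def check(x: int) -> None:
    out = ''
    for group in flag.split(sep):
        digits = ''.join(decode(arr[int(d)] + pad) for d in group)
        out += chr(int(digits))
    if x == 1:
        print(out)
check(1)  # the obfuscated version calls check(0) and therefore never prints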
| 873.5
| 1,746
| 0.587865
| 280
| 1,747
| 3.089286
| 0.275
| 0.041619
| 0.028902
| 0.055491
| 0.386127
| 0.386127
| 0.386127
| 0.284393
| 0.284393
| 0.284393
| 0
| 0.061185
| 0.101889
| 1,747
| 1
| 1,747
| 1,747
| 0.490121
| 0
| 0
| 0
| 0
| 1
| 0.107041
| 0.038924
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 9
|
7c96c6e5d03d07337f23ac4c8c4ab08d3d92e71d
| 121
|
py
|
Python
|
HelloPython.py
|
DickZhang1008/HelloGit
|
0338730f1210b2a719d0e4f88878a9b5f8cb6791
|
[
"Apache-2.0"
] | null | null | null |
HelloPython.py
|
DickZhang1008/HelloGit
|
0338730f1210b2a719d0e4f88878a9b5f8cb6791
|
[
"Apache-2.0"
] | null | null | null |
HelloPython.py
|
DickZhang1008/HelloGit
|
0338730f1210b2a719d0e4f88878a9b5f8cb6791
|
[
"Apache-2.0"
] | null | null | null |
print("我能中100万")
print("我能中100万")
print("我能中100万")
print("我能中100万")
print("我能中100万")
print("我能中100万")
print('可以,哈哈哈哈')
| 12.1
| 16
| 0.68595
| 15
| 121
| 5.533333
| 0.266667
| 0.86747
| 1.228916
| 1.445783
| 0.927711
| 0.927711
| 0.927711
| 0.927711
| 0.927711
| 0.927711
| 0
| 0.160714
| 0.07438
| 121
| 10
| 17
| 12.1
| 0.580357
| 0
| 0
| 0.857143
| 0
| 0
| 0.404959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
7c97dc8aa5218e5e3b148f725d3223d4c8a658da
| 181
|
py
|
Python
|
pycspr/factory/__init__.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | 2
|
2021-04-14T13:49:20.000Z
|
2021-07-06T22:07:02.000Z
|
pycspr/factory/__init__.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | null | null | null |
pycspr/factory/__init__.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | 1
|
2021-04-15T12:52:42.000Z
|
2021-04-15T12:52:42.000Z
|
import pycspr.factory.accounts as accounts
import pycspr.factory.cl_type_info as cl_type_info
import pycspr.factory.cl_type_info as cl_type  # note: binds the same module as above under a second alias
import pycspr.factory.deploys as deploys
| 36.2
| 50
| 0.867403
| 31
| 181
| 4.83871
| 0.290323
| 0.32
| 0.506667
| 0.28
| 0.493333
| 0.493333
| 0.493333
| 0.493333
| 0.493333
| 0
| 0
| 0
| 0.088398
| 181
| 4
| 51
| 45.25
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7cb9ad937fe3033dddc9d60316f80fae28ae3705
| 87,986
|
py
|
Python
|
setup.py
|
fitter22/python-pcl
|
3162310300a0b82bcfcbb8492545858bc8cf879c
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
fitter22/python-pcl
|
3162310300a0b82bcfcbb8492545858bc8cf879c
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
fitter22/python-pcl
|
3162310300a0b82bcfcbb8492545858bc8cf879c
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from collections import defaultdict
from Cython.Distutils import build_ext
from distutils.core import setup
from distutils.extension import Extension
# from Cython.Build import cythonize # MacOS NG
from setuptools import setup, find_packages, Extension
import subprocess
import numpy
import sys
import platform
import os
import time
import shutil
from ctypes.util import find_library
setup_requires = []
install_requires = [
'filelock',
#'mock',
'nose',
# RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility
# https://github.com/scikit-image/scikit-image/issues/3655
# 'numpy>=1.15.1,!=1.50.0',
# numpy.ufunc size changed, may indicate binary incompatibility.
#'numpy>=1.16.1,!=1.16.2',
'Cython>=0.26.0',
]
def pkgconfig(flag):
# Equivalent in Python 2.7 (but not 2.6):
# subprocess.check_output(['pkg-config', flag] + pcl_libs).split()
p = subprocess.Popen(['pkg-config', flag] +
pcl_libs, stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# Assume no evil spaces in filenames; unsure how pkg-config would
# handle those, anyway.
# decode() is required in Python 3. TODO: how do we know the encoding?
return stdout.decode().split()
def pkgconfig_win(flag, cut):
# Equivalent in Python 2.7 (but not 2.6):
# subprocess.check_output(['pkg-config', flag] + pcl_libs).split()
p = subprocess.Popen(['.\\pkg-config\\pkg-config.exe', flag] +
pcl_libs, stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# Assume no evil spaces in filenames; unsure how pkg-config would
# handle those, anyway.
# decode() is required in Python 3. TODO: how do we know the encoding?
# return stdout.decode().split()
# Windows
return stdout.decode().replace('\r\n', '').replace('\ ', ' ').replace('/', '\\').split(cut)
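# Note: as the comments above point out, subprocess.check_output is the Python 2.7+/3.x
# equivalent of the Popen/communicate pattern. An illustrative, unused variant (a sketch
# assuming the same module-level pcl_libs list) would be:
def pkgconfig_check_output(flag):
    # check_output raises CalledProcessError on a non-zero exit instead of returning silently.
    out = subprocess.check_output(['pkg-config', flag] + pcl_libs)
    return out.decode().split()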
if platform.system() == "Windows":
# Check 32bit or 64bit
is_64bits = sys.maxsize > 2**32
# if is_64bits == True
# environment Value
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "PCL_ROOT":
pcl_root = v
# print(pcl_root)
# print("%s: find environment PCL_ROOT" % pcl_root)
break
else:
print("cannot find environment PCL_ROOT", file=sys.stderr)
sys.exit(1)
# Add environment Value
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "PKG_CONFIG_PATH":
pkgconfigstr = v
break
else:
# print("cannot find environment PKG_CONFIG_PATH", file=sys.stderr)
print("cannot find environment PKG_CONFIG_PATH")
pkgconfigstr = pcl_root + '\\lib\\pkgconfig;' + pcl_root + \
'\\3rdParty\\FLANN\\lib\\pkgconfig;' + \
pcl_root + '\\3rdParty\\Eigen\\lib\\pkgconfig;'
os.environ["PKG_CONFIG_PATH"] = pcl_root + '\\lib\\pkgconfig;' + pcl_root + \
'\\3rdParty\\FLANN\\lib\\pkgconfig;' + \
pcl_root + '\\3rdParty\\Eigen\\lib\\pkgconfig;'
print("set environment PKG_CONFIG_PATH=%s" % pkgconfigstr)
# other package(common)
# BOOST_ROOT
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "BOOST_ROOT":
boost_root = v
break
else:
boost_root = pcl_root + '\\3rdParty\\Boost'
# EIGEN_ROOT
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "EIGEN_ROOT":
eigen_root = v
break
else:
eigen_root = pcl_root + '\\3rdParty\\Eigen'
# FLANN_ROOT
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "FLANN_ROOT":
flann_root = v
break
else:
flann_root = pcl_root + '\\3rdParty\\FLANN'
# QHULL_ROOT
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "QHULL_ROOT":
qhull_root = v
break
else:
qhull_root = pcl_root + '\\3rdParty\\Qhull'
# VTK_DIR
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "VTK_DIR":
vtk_root = v
break
else:
vtk_root = pcl_root + '\\3rdParty\\VTK'
# custom(CUDA)
# custom(WinPcap)
# get pkg-config.exe filePath
pkgconfigPath = os.getcwd() + '\\pkg-config\\pkg-config.exe'
print(pkgconfigPath)
# AppVeyor Check
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "PCL_VERSION":
pcl_version = '-' + v
break
else:
# Try to find PCL. XXX we should only do this when trying to build or install.
# in order of preference
PCL_SUPPORTED = ["-1.9", "-1.8", "-1.7", "-1.6", ""]
for pcl_version in PCL_SUPPORTED:
if subprocess.call(['.\\pkg-config\\pkg-config.exe', 'pcl_common%s' % pcl_version]) == 0:
# if subprocess.call([pkgconfigPath, 'pcl_common%s' % pcl_version]) == 0:
break
else:
print("%s: error: cannot find PCL, tried" %
sys.argv[0], file=sys.stderr)
for version in PCL_SUPPORTED:
print(' pkg-config pcl_common%s' % version, file=sys.stderr)
sys.exit(1)
print(pcl_version)
# pcl_version = '-1.6'
# Python Version Check
info = sys.version_info
if pcl_version == '-1.6':
# PCL 1.6.0 python Version == 3.4(>= 3.4?, 2.7 -> NG)
# Visual Studio 2010
if info.major == 3 and info.minor == 4:
boost_version = '1_49'
vtk_version = '5.8'
pcl_libs = ["common", "features", "filters", "kdtree", "octree",
"registration", "sample_consensus", "search", "segmentation",
"surface", "tracking", "visualization"]
pass
else:
print('no building Python Version')
sys.exit(1)
elif pcl_version == '-1.7':
# PCL 1.7.2 python Version >= 3.5
# Visual Studio 2015
if info.major == 3 and info.minor >= 5:
boost_version = '1_57'
vtk_version = '6.2'
pass
# pcl-1.7?
pcl_libs = ["2d", "common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "surface", "tracking", "visualization"]
else:
print('no building Python Version')
sys.exit(1)
elif pcl_version == '-1.8':
# PCL 1.8.0 python Version >= 3.5
# Visual Studio 2015/2017
if info.major == 3 and info.minor >= 5:
# PCL 1.8.1
boost_version = '1_64'
vtk_version = '8.0'
# pcl-1.8
# 1.8.1 use 2d required features
pcl_libs = ["2d", "common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "stereo", "surface", "tracking", "visualization"]
pass
else:
# if info.major == 2 and info.minor == 7:
# import _msvccompiler
# import distutils.msvc9compiler
#
# def find_vcvarsall(version):
# # use vc2017 set vcvarsall.bat path
# # return "C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat"
# # return "C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat"
# vcvarsall, vcruntime = _msvccompiler._find_vcvarsall('x64')
# if vcvarsall is not None:
# print('set msvc2017/2015 compiler')
# print(vcvarsall)
# return vcvarsall
# else:
# print('no set msvc2017/2015 compiler')
# return None
#
# distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
#
# boost_version = '1_64'
# vtk_version = '8.0'
# # pcl-1.8
# # 1.8.1 use 2d required features
# pcl_libs = ["2d", "common", "features", "filters", "geometry",
# "io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
# "recognition", "registration", "sample_consensus", "search",
# "segmentation", "stereo", "surface", "tracking", "visualization"]
# else:
# print('no building Python Version')
# sys.exit(1)
print('no building Python Version')
sys.exit(1)
elif pcl_version == '-1.9':
# PCL 1.9.1 python Version >= 3.5
# Visual Studio 2015/2017
if info.major == 3 and info.minor >= 5:
# PCL 1.9.1
boost_version = '1_68'
vtk_version = '8.1'
# pcl-1.9
# 1.9.1 use 2d required features
pcl_libs = ["2d", "common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "stereo", "surface", "tracking", "visualization"]
pass
else:
# if info.major == 2 and info.minor == 7:
print('no building Python Version')
sys.exit(1)
else:
print('pcl_version Unknown')
sys.exit(1)
# Find build/link options for PCL using pkg-config.
pcl_libs = ["pcl_%s%s" % (lib, pcl_version) for lib in pcl_libs]
# pcl_libs += ['Eigen3']
# print(pcl_libs)
ext_args = defaultdict(list)
# set include path
ext_args['include_dirs'].append(numpy.get_include())
# no use pkg-config
if pcl_version == '-1.6':
# 1.6.0
# boost 1.5.5
# vtk 5.8
# + add VTK
inc_dirs = [pcl_root + '\\include\\pcl' + pcl_version,
pcl_root + '\\3rdParty\\Eigen\\include',
pcl_root + '\\3rdParty\\Boost\\include',
flann_root + '\\include',
qhull_root + '\\include',
vtk_root + '\\include\\vtk-' + vtk_version]
elif pcl_version == '-1.7':
# 1.7.2
# boost 1.5.7
# vtk 6.2
inc_dirs = [pcl_root + '\\include\\pcl' + pcl_version,
eigen_root + '\\eigen3',
boost_root + '\\include\\boost-' + boost_version,
flann_root + '\\include',
qhull_root + '\\include',
vtk_root + '\\include\\vtk-' + vtk_version]
elif pcl_version == '-1.8':
# 1.8.0
# boost 1.6.1
# vtk 7.0
# 1.8.1/vtk 8.0
inc_dirs = [pcl_root + '\\include\\pcl' + pcl_version,
eigen_root + '\\eigen3',
boost_root + '\\include\\boost-' + boost_version,
flann_root + '\\include',
qhull_root + '\\include',
vtk_root + '\\include\\vtk-' + vtk_version]
elif pcl_version == '-1.9':
# 1.9.1
# boost 1.6.8
# vtk 8.1?
# libqhull/libqhull_r include path intentionally not added (io.h conflict)
inc_dirs = [pcl_root + '\\include\\pcl' + pcl_version,
eigen_root + '\\eigen3',
boost_root + '\\include\\boost-' + boost_version,
flann_root + '\\include',
qhull_root + '\\include',
vtk_root + '\\include\\vtk-' + vtk_version]
else:
inc_dirs = []
for inc_dir in inc_dirs:
ext_args['include_dirs'].append(inc_dir)
# for flag in pkgconfig_win('--libs-only-L', '-L'):
# print(flag.lstrip().rstrip())
# ext_args['library_dirs'].append(flag[2:])
# for flag in pkgconfig_win('--libs-only-other', '-l'):
# print(flag.lstrip().rstrip())
# ext_args['extra_link_args'].append(flag)
# end
# set library path
if pcl_version == '-1.6':
# 3rdParty(+Boost, +VTK)
lib_dirs = [pcl_root + '\\lib',
boost_root + '\\lib',
flann_root + '\\lib',
qhull_root + '\\lib',
vtk_root + '\\lib']
elif pcl_version == '-1.7':
# 1.7.2
# 3rdParty(+Boost, +VTK)
lib_dirs = [pcl_root + '\\lib',
boost_root + '\\lib',
flann_root + '\\lib',
qhull_root + '\\lib',
vtk_root + '\\lib']
elif pcl_version == '-1.8':
# 1.8.0
# 3rdParty(+Boost, +VTK)
lib_dirs = [pcl_root + '\\lib',
boost_root + '\\lib',
flann_root + '\\lib',
qhull_root + '\\lib',
vtk_root + '\\lib']
elif pcl_version == '-1.9':
# 1.9.1
# 3rdParty(+Boost, +VTK)
lib_dirs = [pcl_root + '\\lib',
boost_root + '\\lib',
flann_root + '\\lib',
qhull_root + '\\lib',
vtk_root + '\\lib']
else:
lib_dirs = []
for lib_dir in lib_dirs:
ext_args['library_dirs'].append(lib_dir)
# OpenNI2?
# %OPENNI2_REDIST64% %OPENNI2_REDIST%
if pcl_version == '-1.6':
# release
# libreleases = ['pcl_apps_release', 'pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_segmentation_release', 'pcl_search_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
# release + vtk5.3?
libreleases = ['pcl_apps_release', 'pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_segmentation_release', 'pcl_search_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s']
elif pcl_version == '-1.7':
# release
# libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_segmentation_release', 'pcl_search_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
# release + vtk6.2?/6.3?
libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_outofcore_release', 'pcl_people_release', 'pcl_recognition_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_search_release', 'pcl_segmentation_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
elif pcl_version == '-1.8':
# release
# libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_segmentation_release', 'pcl_search_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
# release + vtk7.0
libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_ml_release', 'pcl_octree_release', 'pcl_outofcore_release', 'pcl_people_release', 'pcl_recognition_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_search_release', 'pcl_segmentation_release', 'pcl_stereo_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
elif pcl_version == '-1.9':
# release
# libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_octree_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_segmentation_release', 'pcl_search_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
# release + vtk8.1?
libreleases = ['pcl_common_release', 'pcl_features_release', 'pcl_filters_release', 'pcl_io_release', 'pcl_io_ply_release', 'pcl_kdtree_release', 'pcl_keypoints_release', 'pcl_ml_release', 'pcl_octree_release', 'pcl_outofcore_release', 'pcl_people_release', 'pcl_recognition_release', 'pcl_registration_release', 'pcl_sample_consensus_release', 'pcl_search_release', 'pcl_segmentation_release', 'pcl_stereo_release', 'pcl_surface_release', 'pcl_tracking_release', 'pcl_visualization_release', 'flann', 'flann_s', 'qhull', 'qhull_p', 'qhull_r', 'qhullcpp']
else:
libreleases = []
for librelease in libreleases:
ext_args['libraries'].append(librelease)
# vtk 5.8
# vtk 6.2/6.3
# vtk 7.0/8.0
# vtk 8.1
if vtk_version == '5.8':
# pcl1.6 3rdParty
# vtklibreleases = ['vtkInfovis', 'MapReduceMPI', 'vtkNetCDF', 'QVTK', 'vtkNetCDF_cxx', 'vtkRendering', 'vtkViews', 'vtkVolumeRendering', 'vtkWidgets', 'mpistubs', 'vtkalglib', 'vtkCharts', 'vtkexoIIc', 'vtkexpat', 'vtkCommon', 'vtkfreetype', 'vtkDICOMParser', 'vtkftgl', 'vtkFiltering', 'vtkhdf5', 'vtkjpeg', 'vtkGenericFiltering', 'vtklibxml2', 'vtkGeovis', 'vtkmetaio', 'vtkpng', 'vtkGraphics', 'vtkproj4', 'vtkHybrid', 'vtksqlite', 'vtksys', 'vtkIO', 'vtktiff', 'vtkImaging', 'vtkverdict', 'vtkzlib']
vtklibreleases = []
elif vtk_version == '6.3':
# pcl1.7.2 3rdParty
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version]
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkexpat-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkNetCDF_cxx-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 
'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
elif vtk_version == '7.0':
# pcl_version 1.8.0
# pcl1.6 3rdParty
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkexpat-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + 
vtk_version]
elif vtk_version == '8.0':
# pcl_version 1.8.1
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
# vtk8.0
# all-in-one-package(OpenGL)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
# conda?(OpenGL2)
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersPython-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkPythonInterpreter-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingMatplotlib-' + vtk_version, 'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' 
+ vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkWrappingTools-' + vtk_version, 'vtkCommonCorePython35D-8.0', 'vtkWrappingPython35Core-8.0']
elif vtk_version == '8.1':
# pcl_version 1.9.0/1.9.1
# all-in-one-package(OpenGL)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtknetcdfcpp-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
# conda?(OpenGL2)
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersPython-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtknetcdfcpp-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkPythonInterpreter-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingMatplotlib-' + vtk_version, 'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' 
+ vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkWrappingTools-' + vtk_version]
else:
vtklibreleases = []
for librelease in vtklibreleases:
ext_args['libraries'].append(librelease)
# Note:
# VTK version setting
# Windows base libraries required when linking against VTK
# http://public.kitware.com/pipermail/vtkusers/2008-July/047291.html
win_libreleases = ['kernel32', 'user32', 'gdi32', 'winspool', 'comdlg32',
'advapi32', 'shell32', 'ole32', 'oleaut32', 'uuid', 'odbc32', 'odbccp32']
for win_librelease in win_libreleases:
ext_args['libraries'].append(win_librelease)
# http://www.pcl-users.org/error-in-use-PCLVisualizer-td3719235.html
# Download MSSDKs
# http://msdn.microsoft.com/en-us/windows/bb980924.aspx
#
# http://stackoverflow.com/questions/1236670/how-to-make-opengl-apps-in-64-bits-windows
# C:\Program Files (x86)\Microsoft SDKs\Windows\7.0\Lib\x64\OpenGL32.lib
# C:\Program Files (x86)\Microsoft SDKs\Windows\v7.0A\Lib\x64\OpenGL32.lib
# Add OpenGL32 .h/.lib
win_kit_incs = []
win_kit_libdirs = []
# using _open, _close, _chsize functions (pcl/io/low_level_io.h)
# win_kit_libreleases = ['ucrt', 'libucrt']
# for win_kit_librelease in win_kit_libreleases:
# ext_args['libraries'].append(win_kit_librelease)
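# Pick the Windows SDK library directory matching the PCL build (each PCL release targets a different SDK).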
if pcl_version == '-1.6':
if is_64bits:
# win_opengl_libdirs = ['C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.0A\\Lib\\x64']
# AppVeyor
win_kit_libdirs = [
'C:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Lib\\x64']
else:
# win_opengl_libdirs = ['C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.0A\\Lib\\win32']
# AppVeyor
win_kit_libdirs = [
'C:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Lib\\win32']
elif pcl_version == '-1.7':
if is_64bits:
win_kit_libdirs = [
'C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v8.0A\\Lib\\x64']
else:
win_kit_libdirs = [
'C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v8.0A\\Lib\\win32']
elif pcl_version == '-1.8':
if is_64bits:
# already set path
# win_kit_libdirs = ['C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v8.1A\\Lib\\x64']
# Windows OS 7?
# win_kit_incs = ['C:\\Program Files (x86)\\Windows Kits\\8.1\\Include\\shared', 'C:\\Program Files (x86)\\Windows Kits\\8.1\\Include\\um']
# win_kit_libdirs = ['C:\\Program Files (x86)\\Windows Kits\\8.1\\Lib\\winv6.3\\um\\x64']
# win_kit_libdirs = ['C:\\Program Files (x86)\\Windows Kits\\10\\Lib\\10.0.10240.0\\ucrt\\x64']
# Windows OS 8/8.1/10?
# win_kit_10_version = '10.0.10240.0'
# win_kit_incs = ['C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.10240.0\\ucrt', 'C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.10240.0\\um']
# win_kit_libdirs = ['C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.10240.0\\ucrt', 'C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.10240.0\\um']
pass
else:
# already set path
# Windows OS 7
# win_kit_libdirs = ['C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v8.1A\\Lib\\win32']
# win_kit_libdirs = ['C:\\Program Files (x86)\\Windows Kits\\8.1\\Lib\\winv6.3\\um\\x86']
# win_kit_incs = ['C:\\Program Files (x86)\\Windows Kits\\8.1\\Include\\shared', 'C:\\Program Files (x86)\\Windows Kits\\8.1\\Include\\um']
pass
elif pcl_version == '-1.9':
if is_64bits:
# win_kit_10_version = '10.0.15063.0'
# win_kit_incs = ['C:\\Program Files (x86)\\Windows Kits\\10\\Include\\' + win_kit_10_version+ '\\ucrt', 'C:\\Program Files (x86)\\Windows Kits\\10\\Include\\' + win_kit_10_version + '\\um']
# win_kit_libdirs = ['C:\\Program Files (x86)\\Windows Kits\\10\\Include\\' + win_kit_10_version + '\\ucrt\\x64', 'C:\\Program Files (x86)\\Windows Kits\\10\\Include\\' + win_kit_10_version + '\\um\\x64']
pass
else:
pass
else:
pass
for inc_dir in win_kit_incs:
ext_args['include_dirs'].append(inc_dir)
for lib_dir in win_kit_libdirs:
ext_args['library_dirs'].append(lib_dir)
win_opengl_libreleases = ['OpenGL32']
for opengl_librelease in win_opengl_libreleases:
ext_args['libraries'].append(opengl_librelease)
# use OpenNI
# use OpenNI2
# add environment PATH : pcl/bin, OpenNI2/Tools
# use CUDA?
# CUDA_PATH
# CUDA_PATH_V7_5
# CUDA_PATH_V8_0
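# Look for CUDA_PATH in the environment; the for/else below falls through to the else branch when it is not set.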
for k, v in os.environ.items():
# print("{key} : {value}".format(key=k, value=v))
if k == "CUDA_PATH":
cuda_root = v
break
else:
print('CUDA not used.')
pass
# ext_args['define_macros'].append(('EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET', '1'))
# define_macros=[('BOOST_NO_EXCEPTIONS', 'None')],
# debugs = [('EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET', '1'), ('BOOST_NO_EXCEPTIONS', 'None')]
# _CRT_SECURE_NO_WARNINGS : suppress Windows CRT security warnings
defines = [('EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET',
'1'), ('_CRT_SECURE_NO_WARNINGS', '1')]
for define in defines:
ext_args['define_macros'].append(define)
# ext_args['extra_compile_args'].append('/DWIN32')
# ext_args['extra_compile_args'].append('/D_WINDOWS')
# ext_args['extra_compile_args'].append('/W3')
# ext_args['extra_compile_args'].append('/GR')
ext_args['extra_compile_args'].append('/EHsc')
# FW: Link time errors in RangeImage (with /clr)
# http://www.pcl-users.org/FW-Link-time-errors-in-RangeImage-with-clr-td3581422.html
# ext_args['extra_compile_args'].append('/clr:nostdlib')
# OpenNI2?(+Python3)
# https://ci.appveyor.com/project/KazuakiM/vim-ms-translator/branch/master
# ext_args['extra_compile_args'].append('/DDYNAMIC_MSVCRT_DLL=\"msvcr100.dll\"')
# NG (did not work)
# ext_args['extra_compile_args'].append('/NODEFAULTLIB:msvcrtd')
# https://blogs.msdn.microsoft.com/vcblog/2015/03/03/introducing-the-universal-crt/
# runtime library flag (default /MT?)
# the all-in-one VTK package libraries appear to link the dynamic runtime, so use /MD
ext_args['extra_compile_args'].append('/MD')
# ext_args['extra_compile_args'].append('/MDd')
# custom build module(static build)
# ext_args['extra_compile_args'].append('/MTd')
# ext_args['extra_compile_args'].append('/MT')
# use OpenMP
# https://stackoverflow.com/questions/7844830/cython-openmp-compiler-flag
# ext_args['extra_compile_args'].append('/openmp')
# ext_args['extra_link_args'].append('/openmp')
# Debug View
# print(ext_args)
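# Choose the Cython sources that match the detected PCL version.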
if pcl_version == '-1.6':
module = [Extension("pcl._pcl", ["pcl/_pcl.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
# Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_visualization", ["pcl/pcl_visualization_160.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.7':
module = [Extension("pcl._pcl", ["pcl/_pcl_172.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.8':
module = [Extension("pcl._pcl", ["pcl/_pcl_180.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# conda
# Extension("pcl.pcl_visualization", [
# "pcl/pcl_visualization.pyx", "pcl/vtkInteracterWrapper.cpp"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.9':
module = [Extension("pcl._pcl", ["pcl/_pcl_190.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# conda
# Extension("pcl.pcl_visualization", [
# "pcl/pcl_visualization.pyx", "pcl/vtkInteracterWrapper.cpp"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
else:
print('PCL not installed or pkg-config not found.')
sys.exit(1)
# copy the pcl dll to local subfolder so that it can be added to the package through the data_files option
listDlls = []
if not os.path.isdir('./dlls'):
os.mkdir('./dlls')
for dll in libreleases:
pathDll = find_library(dll)
if pathDll is not None:
shutil.copy2(pathDll, './dlls')
listDlls.append(os.path.join('.\\dlls', dll+'.dll'))
# the path is relative to the python root folder
data_files = [('Lib/site-packages/pcl', listDlls)]
else:
# Not 'Windows'
if sys.platform == 'darwin':
os.environ['ARCHFLAGS'] = ''
# Try to find PCL. XXX we should only do this when trying to build or install.
PCL_SUPPORTED = ["-1.9", "-1.8", "-1.7", "-1.6", ""] # in order of preference
for pcl_version in PCL_SUPPORTED:
if subprocess.call(['pkg-config', 'pcl_common%s' % pcl_version]) == 0:
break
else:
print("%s: error: cannot find PCL, tried" %
sys.argv[0], file=sys.stderr)
for version in PCL_SUPPORTED:
print(' pkg-config pcl_common%s' % version, file=sys.stderr)
sys.exit(1)
# Find build/link options for PCL using pkg-config.
# version 1.6
# pcl_libs = ["common", "features", "filters", "io", "kdtree", "octree",
# "registration", "sample_consensus", "search", "segmentation",
# "surface", "tracking", "visualization"]
# version 1.7
if pcl_version == '-1.7':
pcl_libs = ["common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "surface", "tracking", "visualization"]
else:
# version 1.8
pcl_libs = ["2d", "common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "stereo", "surface", "tracking", "visualization"]
pcl_libs = ["pcl_%s%s" % (lib, pcl_version) for lib in pcl_libs]
ext_args = defaultdict(list)
ext_args['include_dirs'].append(numpy.get_include())
for flag in pkgconfig('--cflags-only-I'):
ext_args['include_dirs'].append(flag[2:])
# OpenNI?
# "-I/usr/include/openni"
# "-I/usr/include/openni"
# /usr/include/ni
ext_args['include_dirs'].append('/usr/include/ni')
# ext_args['library_dirs'].append()
# ext_args['libraries'].append()
# OpenNI2
ext_args['include_dirs'].append('/usr/include/openni2')
# VTK use
if sys.platform == 'darwin':
# pcl 1.8.1(MacOSX)
# if pcl_version == '-1.8':
# vtk_version = '8.0'
# ext_args['include_dirs'].append('/usr/local/include/vtk-' + vtk_version)
# ext_args['library_dirs'].append('/usr/local/lib')
# ext_args['include_dirs'].append('/usr/local/Cellar/vtk/8.0.1/include')
# ext_args['library_dirs'].append('/usr/local/Cellar/vtk/8.0.1/lib')
if pcl_version == '-1.9':
# pcl 1.9.1
# build install?
# vtk_version = '8.1'
# vtk_include_dir = os.path.join('/usr/local' ,'include/vtk-8.1')
# vtk_library_dir = os.path.join('/usr/local', 'lib')
# homebrew(MacOSX homebrew)
# (pcl 1.9.1_3)
# vtk_version = '8.1.2_3'
# vtk_include_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version ,'include/vtk-8.2')
# 2019/05/08 check(pcl 1.9.1_4)
vtk_version = '8.2.0'
vtk_include_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version, 'include/vtk-8.2')
vtk_library_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version, 'lib')
pass
else:
# pcl 1.7.0?(Ubuntu 14.04)
# vtk_version = '5.8'
# ext_args['include_dirs'].append('/usr/include/vtk-' + vtk_version)
# ext_args['library_dirs'].append('/usr/lib')
# pcl 1.7.2(Ubuntu 16.04)(xenial)
if pcl_version == '-1.7':
vtk_version = '6.2'
vtk_include_dir = os.path.join('/usr/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/lib')
elif pcl_version == '-1.8':
# pcl 1.8.0/1?(Ubuntu 18.04)(melodic)
vtk_version = '7.0'
# pcl 1.8.1?
# vtk_version = '8.0'
vtk_include_dir = os.path.join('/usr/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/lib')
elif pcl_version == '-1.9':
# pcl 1.9.1
# build install?
vtk_version = '8.1'
vtk_include_dir = os.path.join('/usr/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/lib')
else:
pass
# other
# pcl 1.9.1(Conda)
# vtk_version = '8.1'
# vtk_include_dir = os.path.join(os.environ["PREFIX"] ,'include/vtk-8.1')
# vtk_library_dir = os.path.join(os.environ["PREFIX"], 'lib')
ext_args['include_dirs'].append(vtk_include_dir)
ext_args['library_dirs'].append(vtk_library_dir)
if vtk_version == '5.8':
vtklibreleases = ['vtkInfovis', 'MapReduceMPI', 'vtkNetCDF', 'QVTK', 'vtkNetCDF_cxx', 'vtkRendering', 'vtkViews', 'vtkVolumeRendering', 'vtkWidgets', 'mpistubs', 'vtkalglib', 'vtkCharts', 'vtkexoIIc', 'vtkexpat', 'vtkCommon', 'vtkfreetype', 'vtkDICOMParser', 'vtkftgl', 'vtkFiltering', 'vtkhdf5', 'vtkjpeg', 'vtkGenericFiltering', 'vtklibxml2', 'vtkGeovis', 'vtkmetaio', 'vtkpng', 'vtkGraphics', 'vtkproj4', 'vtkHybrid', 'vtksqlite', 'vtksys', 'vtkIO', 'vtktiff', 'vtkImaging', 'vtkverdict', 'vtkzlib']
elif vtk_version == '6.3':
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkexpat-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkNetCDF_cxx-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 
'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
elif vtk_version == '7.0':
# apt package? (VTK built with the OpenGL backend?)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkexpat-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLIC-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + 
vtk_version]
elif vtk_version == '8.0':
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
# apt package? (VTK built with the OpenGL backend?)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersPython-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkPythonInterpreter-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingMatplotlib-' + vtk_version, 'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + 
vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkWrappingTools-' + vtk_version]
elif vtk_version == '8.1':
# pcl_version 1.9.1
# conda or build module, MacOS X
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtknetcdfcpp-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version,'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version]
else:
vtklibreleases = []
for librelease in vtklibreleases:
ext_args['libraries'].append(librelease)
for flag in pkgconfig('--cflags-only-other'):
if flag.startswith('-D'):
macro, value = flag[2:].split('=', 1)
ext_args['define_macros'].append((macro, value))
else:
ext_args['extra_compile_args'].append(flag)
# clang?
# https://github.com/strawlab/python-pcl/issues/129
# gcc defaults to libstdc++, clang (on macOS) defaults to libc++
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# ext_args['extra_compile_args'].append("-stdlib=libc++")
if sys.platform == 'darwin':
# not use gcc?
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# clang(min : 10.7?/10.9?)
# minimum deployment target of OS X 10.9
ext_args['extra_compile_args'].append("-stdlib=libc++")
ext_args['extra_compile_args'].append("-mmacosx-version-min=10.9")
ext_args['extra_link_args'].append("-stdlib=libc++")
ext_args['extra_link_args'].append("-mmacosx-version-min=10.9")
# VTK headers need C++11; without it, 'override' specifier errors occur.
ext_args['extra_compile_args'].append("-std=c++11")
# macOS: using OpenMP
# https://iscinumpy.gitlab.io/post/omp-on-high-sierra/
# prerequisite:
# $ brew install libomp
# ext_args['extra_compile_args'].append('-fopenmp -Xpreprocessor')
# ext_args['extra_link_args'].append('-fopenmp -Xpreprocessor -lomp')
pass
else:
ext_args['extra_compile_args'].append("-std=c++11")
ext_args['library_dirs'].append("/usr/lib/x86_64-linux-gnu/")
# gcc? use standard library
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# ext_args['extra_link_args'].append("-stdlib=libstdc++")
# clang use standard library
# ext_args['extra_compile_args'].append("-stdlib=libc++")
# ext_args['extra_link_args'].append("-stdlib=libc++")
# using openmp
# ext_args['extra_compile_args'].append('-fopenmp')
# ext_args['extra_link_args'].append('-fopenmp')
pass
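# Translate pkg-config link flags (-l, -L, other) into Extension keyword arguments.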
for flag in pkgconfig('--libs-only-l'):
if flag == "-lflann_cpp-gd":
print(
"skipping -lflann_cpp-gd (see https://github.com/strawlab/python-pcl/issues/29")
continue
ext_args['libraries'].append(flag[2:])
for flag in pkgconfig('--libs-only-L'):
ext_args['library_dirs'].append(flag[2:])
for flag in pkgconfig('--libs-only-other'):
ext_args['extra_link_args'].append(flag)
# grabber?
# -lboost_system
# ext_args['extra_link_args'].append('-lboost_system')
# MacOSX?
# ext_args['extra_link_args'].append('-lboost_system_mt')
# ext_args['extra_link_args'].append('-lboost_bind')
# Fix compile error on Ubuntu 12.04 (e.g., Travis-CI).
ext_args['define_macros'].append(
("EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET", "1"))
if pcl_version == '-1.6':
module = [Extension("pcl._pcl", ["pcl/_pcl.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
# Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization_160.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.7':
module = [Extension("pcl._pcl", ["pcl/_pcl_172.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.8':
module = [Extension("pcl._pcl", ["pcl/_pcl_180.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.9':
module = [Extension("pcl._pcl", ["pcl/_pcl_190.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
else:
print('PCL not installed or pkg-config not found.')
sys.exit(1)
listDlls = []
data_files = None
setup(name='python-pcl',
description='Python bindings for the Point Cloud Library (PCL), using Cython.',
url='http://github.com/strawlab/python-pcl',
version='0.3.0rc1',
author='John Stowers',
author_email='john.stowers@gmail.com',
maintainer='Tooru Oonuma',
maintainer_email='t753github@gmail.com',
license='BSD',
packages=[
"pcl",
# "pcl.pcl_visualization",
],
zip_safe=False,
setup_requires=setup_requires,
install_requires=install_requires,
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
tests_require=['mock', 'nose'],
ext_modules=module,
cmdclass={'build_ext': build_ext},
data_files=data_files
)
| 97.653718
| 3,707
| 0.670027
| 9,047
| 87,986
| 6.246933
| 0.075495
| 0.243825
| 0.007591
| 0.007927
| 0.898967
| 0.883644
| 0.86832
| 0.850308
| 0.840824
| 0.834702
| 0
| 0.014657
| 0.178824
| 87,986
| 900
| 3,708
| 97.762222
| 0.76755
| 0.379094
| 0
| 0.59436
| 0
| 0.002169
| 0.379442
| 0.085064
| 0
| 0
| 0
| 0.001111
| 0
| 1
| 0.004338
| false
| 0.030369
| 0.047722
| 0
| 0.056399
| 0.041215
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7cdadc65317a51c6a7e6476ec09e9dde2398d057
| 155
|
py
|
Python
|
cmyt/colormaps/pixel_blue.py
|
yt-project/cmyt
|
566ba60864b838827c51f2bddcd93efff97fde05
|
[
"BSD-3-Clause-Clear"
] | 3
|
2021-07-15T17:30:03.000Z
|
2021-07-16T01:09:01.000Z
|
cmyt/colormaps/pixel_blue.py
|
yt-project/cmyt
|
566ba60864b838827c51f2bddcd93efff97fde05
|
[
"BSD-3-Clause-Clear"
] | 20
|
2021-07-15T17:09:46.000Z
|
2022-02-07T09:35:54.000Z
|
cmyt/colormaps/pixel_blue.py
|
yt-project/cmyt
|
566ba60864b838827c51f2bddcd93efff97fde05
|
[
"BSD-3-Clause-Clear"
] | 2
|
2021-07-15T13:26:03.000Z
|
2021-07-15T13:29:08.000Z
|
data = {
"red": ((0.0, 0.0, 0.0), (1.0, 0.2, 0.2)),
"green": ((0.0, 0.0, 0.0), (1.0, 0.2, 0.2)),
"blue": ((0.0, 0.0, 0.0), (1.0, 1.0, 1.0)),
}
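# Each channel lists (x, y_below, y_above) anchor points, i.e. the segmentdata
# format expected by matplotlib's LinearSegmentedColormap (assumed usage here).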
| 25.833333
| 48
| 0.335484
| 40
| 155
| 1.3
| 0.175
| 0.653846
| 0.692308
| 0.692308
| 0.692308
| 0.615385
| 0.615385
| 0.615385
| 0.461538
| 0.461538
| 0
| 0.305085
| 0.23871
| 155
| 5
| 49
| 31
| 0.135593
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b00f7f1475d6dac52789d562d37ad7c9d974839
| 405
|
py
|
Python
|
ChoateStudentHelp/__init__.py
|
brianHarder/choateStudentHelp
|
d0787e0f256688683fd12727e75031d953efa1e7
|
[
"MIT"
] | null | null | null |
ChoateStudentHelp/__init__.py
|
brianHarder/choateStudentHelp
|
d0787e0f256688683fd12727e75031d953efa1e7
|
[
"MIT"
] | null | null | null |
ChoateStudentHelp/__init__.py
|
brianHarder/choateStudentHelp
|
d0787e0f256688683fd12727e75031d953efa1e7
|
[
"MIT"
] | null | null | null |
from ChoateStudentHelp.ChoateStudentHelp_module import Math
from ChoateStudentHelp.ChoateStudentHelp_module import standardizedTest
from ChoateStudentHelp.ChoateStudentHelp_module import stocks
from ChoateStudentHelp.ChoateStudentHelp_module import fortune_teller
from ChoateStudentHelp.ChoateStudentHelp_module import walking_time
from ChoateStudentHelp.ChoateStudentHelp_module import visual_design
| 67.5
| 72
| 0.916049
| 39
| 405
| 9.282051
| 0.333333
| 0.348066
| 0.629834
| 0.729282
| 0.828729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069136
| 405
| 6
| 73
| 67.5
| 0.960212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6b05b7a9ca599032cbd31cb52a4fbf75a3def12a
| 63,634
|
py
|
Python
|
shadowsocks/obfsplugin/auth.py
|
metecyu/shadowsocksr
|
1f472d6975a1f40f80dfd53e41455b92b2fe1e48
|
[
"Apache-2.0"
] | null | null | null |
shadowsocks/obfsplugin/auth.py
|
metecyu/shadowsocksr
|
1f472d6975a1f40f80dfd53e41455b92b2fe1e48
|
[
"Apache-2.0"
] | null | null | null |
shadowsocks/obfsplugin/auth.py
|
metecyu/shadowsocksr
|
1f472d6975a1f40f80dfd53e41455b92b2fe1e48
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2015-2015 breakwa11
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
import binascii
import base64
import time
import datetime
import random
import math
import struct
import zlib
import hmac
import shadowsocks
from shadowsocks import common, lru_cache, encrypt
from shadowsocks.obfsplugin import plain
from shadowsocks.common import to_bytes, to_str, ord, chr
def create_auth_sha1(method):
return auth_sha1(method)
def create_auth_sha1_v2(method):
return auth_sha1_v2(method)
def create_auth_sha1_v4(method):
return auth_sha1_v4(method)
def create_auth_aes128(method):
return auth_aes128(method)
def create_auth_aes128_md5(method):
return auth_aes128_sha1(method, hashlib.md5)
def create_auth_aes128_sha1(method):
return auth_aes128_sha1(method, hashlib.sha1)
obfs_map = {
'auth_sha1': (create_auth_sha1,),
'auth_sha1_compatible': (create_auth_sha1,),
'auth_sha1_v2': (create_auth_sha1_v2,),
'auth_sha1_v2_compatible': (create_auth_sha1_v2,),
'auth_sha1_v4': (create_auth_sha1_v4,),
'auth_sha1_v4_compatible': (create_auth_sha1_v4,),
'auth_aes128': (create_auth_aes128,),
'auth_aes128_md5': (create_auth_aes128_md5,),
'auth_aes128_sha1': (create_auth_aes128_sha1,),
}
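# Each entry maps a protocol name to a factory; the '_compatible' variants reuse
# the same factory but, on an authentication mismatch, fall back to passing the
# stream through untouched instead of returning junk (see not_match_return below).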
def match_begin(str1, str2):
if len(str1) >= len(str2):
if str1[:len(str2)] == str2:
return True
return False
class obfs_verify_data(object):
def __init__(self):
pass
class auth_base(plain.plain):
def __init__(self, method):
super(auth_base, self).__init__(method)
self.method = method
self.no_compatible_method = ''
def init_data(self):
return ''
def set_server_info(self, server_info):
self.server_info = server_info
def client_encode(self, buf):
return buf
def client_decode(self, buf):
return (buf, False)
def server_encode(self, buf):
return buf
def server_decode(self, buf):
return (buf, True, False)
def not_match_return(self, buf):
self.raw_trans = True
if self.method == self.no_compatible_method:
return (b'E'*2048, False)
return (buf, False)
class client_queue(object):
def __init__(self, begin_id):
self.front = begin_id - 64
self.back = begin_id + 1
self.alloc = {}
self.enable = True
self.last_update = time.time()
def update(self):
self.last_update = time.time()
def is_active(self):
return time.time() - self.last_update < 60 * 3
def re_enable(self, connection_id):
self.enable = True
self.front = connection_id - 64
self.back = connection_id + 1
self.alloc = {}
def insert(self, connection_id):
if not self.enable:
logging.warn('obfs auth: not enable')
return False
if not self.is_active():
self.re_enable(connection_id)
self.update()
if connection_id < self.front:
logging.warn('obfs auth: deprecated id, someone replay attack')
return False
if connection_id > self.front + 0x4000:
logging.warn('obfs auth: wrong id')
return False
if connection_id in self.alloc:
logging.warn('obfs auth: duplicate id, someone replay attack')
return False
if self.back <= connection_id:
self.back = connection_id + 1
self.alloc[connection_id] = 1
while (self.front in self.alloc) or self.front + 0x1000 < self.back:
if self.front in self.alloc:
del self.alloc[self.front]
self.front += 1
return True
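# Replay-protection sketch: each client keeps a sliding window of connection ids.
# Ids below `front` (already seen / too old), more than 0x4000 ahead of it, or
# already allocated are rejected; `front` then advances while its slot is in use
# or the window grows past 0x1000 entries.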
class obfs_auth_data(object):
def __init__(self):
self.client_id = {}
self.startup_time = int(time.time() - 30) & 0xFFFFFFFF
self.local_client_id = b''
self.connection_id = 0
self.set_max_client(64) # max active client count
def update(self, client_id, connection_id):
if client_id in self.client_id:
self.client_id[client_id].update()
def set_max_client(self, max_client):
self.max_client = max_client
self.max_buffer = max(self.max_client * 2, 256)
def insert(self, client_id, connection_id):
if client_id not in self.client_id or not self.client_id[client_id].enable:
active = 0
for c_id in self.client_id:
if self.client_id[c_id].is_active():
active += 1
if active >= self.max_client:
logging.warn('obfs auth: max active clients exceeded')
return False
if len(self.client_id) < self.max_client:
if client_id not in self.client_id:
self.client_id[client_id] = client_queue(connection_id)
else:
self.client_id[client_id].re_enable(connection_id)
return self.client_id[client_id].insert(connection_id)
keys = list(self.client_id.keys())  # materialize: dict views cannot be shuffled on Python 3
random.shuffle(keys)
for c_id in keys:
if not self.client_id[c_id].is_active() and self.client_id[c_id].enable:
if len(self.client_id) >= self.max_buffer:
del self.client_id[c_id]
else:
self.client_id[c_id].enable = False
if client_id not in self.client_id:
self.client_id[client_id] = client_queue(connection_id)
else:
self.client_id[client_id].re_enable(connection_id)
return self.client_id[client_id].insert(connection_id)
logging.warn('obfs auth: no inactive client [assert]')
return False
else:
return self.client_id[client_id].insert(connection_id)
class auth_sha1(auth_base):
def __init__(self, method):
super(auth_sha1, self).__init__(method)
self.recv_buf = b''
self.unit_len = 8000
self.decrypt_packet_num = 0
self.raw_trans = False
self.has_sent_header = False
self.has_recv_header = False
self.client_id = 0
self.connection_id = 0
self.max_time_dif = 60 * 60 # time dif (second) setting
self.no_compatible_method = 'auth_sha1'
def init_data(self):
return obfs_auth_data()
def set_server_info(self, server_info):
self.server_info = server_info
try:
max_client = int(server_info.protocol_param)
except:
max_client = 64
self.server_info.data.set_max_client(max_client)
def pack_data(self, buf):
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 16)
data = common.chr(len(rnd_data) + 1) + rnd_data + buf
data = struct.pack('>H', len(data) + 6) + data
adler32 = zlib.adler32(data) & 0xFFFFFFFF
data += struct.pack('<I', adler32)
return data
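# For reference, the frame produced above is:
#   [2-byte big-endian total length][1-byte pad_len+1][0-15 random pad bytes]
#   [payload][4-byte little-endian Adler-32 of everything before the checksum]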
def pack_auth_data(self, buf):
if len(buf) == 0:
return b''
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 128)
data = common.chr(len(rnd_data) + 1) + rnd_data + buf
data = struct.pack('>H', len(data) + 16) + data
crc = binascii.crc32(self.server_info.key) & 0xFFFFFFFF
data = struct.pack('<I', crc) + data
data += hmac.new(self.server_info.iv + self.server_info.key, data, hashlib.sha1).digest()[:10]
return data
def auth_data(self):
utc_time = int(time.time()) & 0xFFFFFFFF
if self.server_info.data.connection_id > 0xFF000000:
self.server_info.data.local_client_id = b''
if not self.server_info.data.local_client_id:
self.server_info.data.local_client_id = os.urandom(4)
logging.debug("local_client_id %s" % (binascii.hexlify(self.server_info.data.local_client_id),))
self.server_info.data.connection_id = struct.unpack('<I', os.urandom(4))[0] & 0xFFFFFF
self.server_info.data.connection_id += 1
return b''.join([struct.pack('<I', utc_time),
self.server_info.data.local_client_id,
struct.pack('<I', self.server_info.data.connection_id)])
def client_pre_encrypt(self, buf):
ret = b''
if not self.has_sent_header:
head_size = self.get_head_size(buf, 30)
datalen = min(len(buf), random.randint(0, 31) + head_size)
ret += self.pack_auth_data(self.auth_data() + buf[:datalen])
buf = buf[datalen:]
self.has_sent_header = True
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def client_post_decrypt(self, buf):
if self.raw_trans:
return buf
self.recv_buf += buf
out_buf = b''
while len(self.recv_buf) > 2:
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[2]) + 2
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if out_buf:
self.decrypt_packet_num += 1
return out_buf
def server_pre_encrypt(self, buf):
if self.raw_trans:
return buf
ret = b''
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def server_post_decrypt(self, buf):
if self.raw_trans:
return (buf, False)
self.recv_buf += buf
out_buf = b''
if not self.has_recv_header:
if len(self.recv_buf) < 6:
return (b'', False)
crc = struct.pack('<I', binascii.crc32(self.server_info.key) & 0xFFFFFFFF)
if crc != self.recv_buf[:4]:
return self.not_match_return(self.recv_buf)
length = struct.unpack('>H', self.recv_buf[4:6])[0]
if length > 2048:
return self.not_match_return(self.recv_buf)
if length > len(self.recv_buf):
return (b'', False)
sha1data = hmac.new(self.server_info.recv_iv + self.server_info.key, self.recv_buf[:length - 10], hashlib.sha1).digest()[:10]
if sha1data != self.recv_buf[length - 10:length]:
logging.error('auth_sha1 data incorrect auth HMAC-SHA1')
return self.not_match_return(self.recv_buf)
pos = common.ord(self.recv_buf[6]) + 6
out_buf = self.recv_buf[pos:length - 10]
if len(out_buf) < 12:
logging.info('auth_sha1: too short, data %s' % (binascii.hexlify(self.recv_buf),))
return self.not_match_return(self.recv_buf)
utc_time = struct.unpack('<I', out_buf[:4])[0]
client_id = struct.unpack('<I', out_buf[4:8])[0]
connection_id = struct.unpack('<I', out_buf[8:12])[0]
time_dif = common.int32(utc_time - (int(time.time()) & 0xffffffff))
if time_dif < -self.max_time_dif or time_dif > self.max_time_dif \
or common.int32(utc_time - self.server_info.data.startup_time) < -self.max_time_dif / 2:
logging.info('auth_sha1: wrong timestamp, time_dif %d, data %s' % (time_dif, binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
elif self.server_info.data.insert(client_id, connection_id):
self.has_recv_header = True
out_buf = out_buf[12:]
self.client_id = client_id
self.connection_id = connection_id
else:
logging.info('auth_sha1: auth fail, data %s' % (binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
self.recv_buf = self.recv_buf[length:]
self.has_recv_header = True
while len(self.recv_buf) > 2:
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
logging.info('auth_sha1: over size')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
logging.info('auth_sha1: checksum error, data %s' % (binascii.hexlify(self.recv_buf[:length]),))
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[2]) + 2
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if out_buf:
self.server_info.data.update(self.client_id, self.connection_id)
self.decrypt_packet_num += 1
return (out_buf, False)
class obfs_auth_v2_data(object):
def __init__(self):
self.client_id = lru_cache.LRUCache()
self.local_client_id = b''
self.connection_id = 0
self.set_max_client(64) # max active client count
def update(self, client_id, connection_id):
if client_id in self.client_id:
self.client_id[client_id].update()
def set_max_client(self, max_client):
self.max_client = max_client
self.max_buffer = max(self.max_client * 2, 1024)
def insert(self, client_id, connection_id):
if self.client_id.get(client_id, None) is None or not self.client_id[client_id].enable:
if self.client_id.first() is None or len(self.client_id) < self.max_client:
if client_id not in self.client_id:
#TODO: check
self.client_id[client_id] = client_queue(connection_id)
else:
self.client_id[client_id].re_enable(connection_id)
return self.client_id[client_id].insert(connection_id)
if not self.client_id[self.client_id.first()].is_active():
del self.client_id[self.client_id.first()]
if client_id not in self.client_id:
#TODO: check
self.client_id[client_id] = client_queue(connection_id)
else:
self.client_id[client_id].re_enable(connection_id)
return self.client_id[client_id].insert(connection_id)
logging.warn('auth_sha1_v2: no inactive client')
return False
else:
return self.client_id[client_id].insert(connection_id)
class auth_sha1_v2(auth_base):
def __init__(self, method):
super(auth_sha1_v2, self).__init__(method)
self.recv_buf = b''
self.unit_len = 8100
self.decrypt_packet_num = 0
self.raw_trans = False
self.has_sent_header = False
self.has_recv_header = False
self.client_id = 0
self.connection_id = 0
self.salt = b"auth_sha1_v2"
self.no_compatible_method = 'auth_sha1_v2'
def init_data(self):
return obfs_auth_v2_data()
def set_server_info(self, server_info):
self.server_info = server_info
try:
max_client = int(server_info.protocol_param)
except:
max_client = 64
self.server_info.data.set_max_client(max_client)
def rnd_data(self, buf_size):
if buf_size > 1300:
return b'\x01'
if buf_size > 400:
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 128)
return common.chr(len(rnd_data) + 1) + rnd_data
rnd_data = os.urandom(struct.unpack('>H', os.urandom(2))[0] % 1024)
return common.chr(255) + struct.pack('>H', len(rnd_data) + 3) + rnd_data
def pack_data(self, buf):
data = self.rnd_data(len(buf)) + buf
data = struct.pack('>H', len(data) + 6) + data
adler32 = zlib.adler32(data) & 0xFFFFFFFF
data += struct.pack('<I', adler32)
return data
def pack_auth_data(self, buf):
if len(buf) == 0:
return b''
data = self.rnd_data(len(buf)) + buf
data = struct.pack('>H', len(data) + 16) + data
crc = binascii.crc32(self.salt + self.server_info.key) & 0xFFFFFFFF
data = struct.pack('<I', crc) + data
data += hmac.new(self.server_info.iv + self.server_info.key, data, hashlib.sha1).digest()[:10]
return data
def auth_data(self):
if self.server_info.data.connection_id > 0xFF000000:
self.server_info.data.local_client_id = b''
if not self.server_info.data.local_client_id:
self.server_info.data.local_client_id = os.urandom(8)
logging.debug("local_client_id %s" % (binascii.hexlify(self.server_info.data.local_client_id),))
self.server_info.data.connection_id = struct.unpack('<Q', self.server_info.data.local_client_id)[0] % 0xFFFFFD
self.server_info.data.connection_id += 1
return b''.join([self.server_info.data.local_client_id,
struct.pack('<I', self.server_info.data.connection_id)])
def client_pre_encrypt(self, buf):
ret = b''
if not self.has_sent_header:
head_size = self.get_head_size(buf, 30)
datalen = min(len(buf), random.randint(0, 31) + head_size)
ret += self.pack_auth_data(self.auth_data() + buf[:datalen])
buf = buf[datalen:]
self.has_sent_header = True
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def client_post_decrypt(self, buf):
if self.raw_trans:
return buf
self.recv_buf += buf
out_buf = b''
while len(self.recv_buf) > 2:
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[2])
if pos < 255:
pos += 2
else:
pos = struct.unpack('>H', self.recv_buf[3:5])[0] + 2
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if out_buf:
self.decrypt_packet_num += 1
return out_buf
def server_pre_encrypt(self, buf):
if self.raw_trans:
return buf
ret = b''
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def server_post_decrypt(self, buf):
if self.raw_trans:
return (buf, False)
self.recv_buf += buf
out_buf = b''
sendback = False
if not self.has_recv_header:
if len(self.recv_buf) < 6:
return (b'', False)
crc = struct.pack('<I', binascii.crc32(self.salt + self.server_info.key) & 0xFFFFFFFF)
if crc != self.recv_buf[:4]:
return self.not_match_return(self.recv_buf)
length = struct.unpack('>H', self.recv_buf[4:6])[0]
if length > 2048:
return self.not_match_return(self.recv_buf)
if length > len(self.recv_buf):
return (b'', False)
sha1data = hmac.new(self.server_info.recv_iv + self.server_info.key, self.recv_buf[:length - 10], hashlib.sha1).digest()[:10]
if sha1data != self.recv_buf[length - 10:length]:
logging.error('auth_sha1_v2 data incorrect auth HMAC-SHA1')
return self.not_match_return(self.recv_buf)
pos = common.ord(self.recv_buf[6])
if pos < 255:
pos += 6
else:
pos = struct.unpack('>H', self.recv_buf[7:9])[0] + 6
out_buf = self.recv_buf[pos:length - 10]
if len(out_buf) < 12:
logging.info('auth_sha1_v2: too short, data %s' % (binascii.hexlify(self.recv_buf),))
return self.not_match_return(self.recv_buf)
client_id = struct.unpack('<Q', out_buf[:8])[0]
connection_id = struct.unpack('<I', out_buf[8:12])[0]
if self.server_info.data.insert(client_id, connection_id):
self.has_recv_header = True
out_buf = out_buf[12:]
self.client_id = client_id
self.connection_id = connection_id
else:
logging.info('auth_sha1_v2: auth fail, data %s' % (binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
self.recv_buf = self.recv_buf[length:]
self.has_recv_header = True
sendback = True
while len(self.recv_buf) > 2:
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
logging.info('auth_sha1_v2: over size')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
logging.info('auth_sha1_v2: checksum error, data %s' % (binascii.hexlify(self.recv_buf[:length]),))
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[2])
if pos < 255:
pos += 2
else:
pos = struct.unpack('>H', self.recv_buf[3:5])[0] + 2
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if pos == length - 4:
sendback = True
if out_buf:
self.server_info.data.update(self.client_id, self.connection_id)
self.decrypt_packet_num += 1
return (out_buf, sendback)
class auth_sha1_v4(auth_base):
def __init__(self, method):
super(auth_sha1_v4, self).__init__(method)
self.recv_buf = b''
self.unit_len = 8100
self.decrypt_packet_num = 0
self.raw_trans = False
self.has_sent_header = False
self.has_recv_header = False
self.client_id = 0
self.connection_id = 0
self.max_time_dif = 60 * 60 * 24 # time dif (second) setting
self.salt = b"auth_sha1_v4"
self.no_compatible_method = 'auth_sha1_v4'
def init_data(self):
return obfs_auth_v2_data()
def set_server_info(self, server_info):
self.server_info = server_info
try:
max_client = int(server_info.protocol_param)
except:
max_client = 64
self.server_info.data.set_max_client(max_client)
def rnd_data(self, buf_size):
if buf_size > 1200:
return b'\x01'
if buf_size > 400:
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 256)
else:
rnd_data = os.urandom(struct.unpack('>H', os.urandom(2))[0] % 512)
if len(rnd_data) < 128:
return common.chr(len(rnd_data) + 1) + rnd_data
else:
return common.chr(255) + struct.pack('>H', len(rnd_data) + 3) + rnd_data
def pack_data(self, buf):
data = self.rnd_data(len(buf)) + buf
data_len = len(data) + 8
crc = binascii.crc32(struct.pack('>H', data_len)) & 0xFFFF
data = struct.pack('<H', crc) + data
data = struct.pack('>H', data_len) + data
adler32 = zlib.adler32(data) & 0xFFFFFFFF
data += struct.pack('<I', adler32)
return data
def pack_auth_data(self, buf):
if len(buf) == 0:
return b''
data = self.rnd_data(len(buf)) + buf
data_len = len(data) + 16
crc = binascii.crc32(struct.pack('>H', data_len) + self.salt + self.server_info.key) & 0xFFFFFFFF
data = struct.pack('<I', crc) + data
data = struct.pack('>H', data_len) + data
data += hmac.new(self.server_info.iv + self.server_info.key, data, hashlib.sha1).digest()[:10]
return data
def auth_data(self):
utc_time = int(time.time()) & 0xFFFFFFFF
if self.server_info.data.connection_id > 0xFF000000:
self.server_info.data.local_client_id = b''
if not self.server_info.data.local_client_id:
self.server_info.data.local_client_id = os.urandom(4)
logging.debug("local_client_id %s" % (binascii.hexlify(self.server_info.data.local_client_id),))
self.server_info.data.connection_id = struct.unpack('<I', os.urandom(4))[0] & 0xFFFFFF
self.server_info.data.connection_id += 1
return b''.join([struct.pack('<I', utc_time),
self.server_info.data.local_client_id,
struct.pack('<I', self.server_info.data.connection_id)])
def client_pre_encrypt(self, buf):
ret = b''
if not self.has_sent_header:
head_size = self.get_head_size(buf, 30)
datalen = min(len(buf), random.randint(0, 31) + head_size)
ret += self.pack_auth_data(self.auth_data() + buf[:datalen])
buf = buf[datalen:]
self.has_sent_header = True
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def client_post_decrypt(self, buf):
if self.raw_trans:
return buf
self.recv_buf += buf
out_buf = b''
while len(self.recv_buf) > 4:
crc = struct.pack('<H', binascii.crc32(self.recv_buf[:2]) & 0xFFFF)
if crc != self.recv_buf[2:4]:
raise Exception('client_post_decrypt data incorrect crc')
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('>H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if out_buf:
self.decrypt_packet_num += 1
return out_buf
def server_pre_encrypt(self, buf):
if self.raw_trans:
return buf
ret = b''
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def server_post_decrypt(self, buf):
if self.raw_trans:
return (buf, False)
self.recv_buf += buf
out_buf = b''
sendback = False
if not self.has_recv_header:
if len(self.recv_buf) <= 6:
return (b'', False)
crc = struct.pack('<I', binascii.crc32(self.recv_buf[:2] + self.salt + self.server_info.key) & 0xFFFFFFFF)
if crc != self.recv_buf[2:6]:
return self.not_match_return(self.recv_buf)
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length > len(self.recv_buf):
return (b'', False)
sha1data = hmac.new(self.server_info.recv_iv + self.server_info.key, self.recv_buf[:length - 10], hashlib.sha1).digest()[:10]
if sha1data != self.recv_buf[length - 10:length]:
logging.error('auth_sha1_v4 data incorrect auth HMAC-SHA1')
return self.not_match_return(self.recv_buf)
pos = common.ord(self.recv_buf[6])
if pos < 255:
pos += 6
else:
pos = struct.unpack('>H', self.recv_buf[7:9])[0] + 6
out_buf = self.recv_buf[pos:length - 10]
if len(out_buf) < 12:
logging.info('auth_sha1_v4: too short, data %s' % (binascii.hexlify(self.recv_buf),))
return self.not_match_return(self.recv_buf)
utc_time = struct.unpack('<I', out_buf[:4])[0]
client_id = struct.unpack('<I', out_buf[4:8])[0]
connection_id = struct.unpack('<I', out_buf[8:12])[0]
time_dif = common.int32(utc_time - (int(time.time()) & 0xffffffff))
if time_dif < -self.max_time_dif or time_dif > self.max_time_dif:
logging.info('auth_sha1_v4: wrong timestamp, time_dif %d, data %s' % (time_dif, binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
elif self.server_info.data.insert(client_id, connection_id):
self.has_recv_header = True
out_buf = out_buf[12:]
self.client_id = client_id
self.connection_id = connection_id
else:
logging.info('auth_sha1_v4: auth fail, data %s' % (binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
self.recv_buf = self.recv_buf[length:]
self.has_recv_header = True
sendback = True
while len(self.recv_buf) > 4:
crc = struct.pack('<H', binascii.crc32(self.recv_buf[:2]) & 0xFFFF)
if crc != self.recv_buf[2:4]:
self.raw_trans = True
logging.info('auth_sha1_v4: wrong crc')
if self.decrypt_packet_num == 0:
logging.info('auth_sha1_v4: wrong crc')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
length = struct.unpack('>H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
logging.info('auth_sha1_v4: over size')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
logging.info('auth_sha1_v4: checksum error, data %s' % (binascii.hexlify(self.recv_buf[:length]),))
self.raw_trans = True
self.recv_buf = b''
if self.decrypt_packet_num == 0:
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data incorrect checksum')
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('>H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if pos == length - 4:
sendback = True
if out_buf:
self.server_info.data.update(self.client_id, self.connection_id)
self.decrypt_packet_num += 1
return (out_buf, sendback)
class auth_aes128(auth_base):
def __init__(self, method):
super(auth_aes128, self).__init__(method)
self.recv_buf = b''
self.unit_len = 8100
self.raw_trans = False
self.has_sent_header = False
self.has_recv_header = False
self.client_id = 0
self.connection_id = 0
self.max_time_dif = 60 * 60 * 24 # time dif (second) setting
self.salt = b"auth_aes128"
self.no_compatible_method = 'auth_aes128'
self.extra_wait_size = struct.unpack('>H', os.urandom(2))[0] % 1024
self.pack_id = 0
self.recv_id = 0
def init_data(self):
return obfs_auth_v2_data()
def set_server_info(self, server_info):
self.server_info = server_info
try:
max_client = int(server_info.protocol_param)
except:
max_client = 64
self.server_info.data.set_max_client(max_client)
def rnd_data(self, buf_size):
if buf_size > 1200:
return b'\x01'
if self.pack_id > 4:
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 32)
elif buf_size > 900:
rnd_data = os.urandom(common.ord(os.urandom(1)[0]) % 128)
else:
rnd_data = os.urandom(struct.unpack('>H', os.urandom(2))[0] % 512)
if len(rnd_data) < 128:
return common.chr(len(rnd_data) + 1) + rnd_data
else:
return common.chr(255) + struct.pack('<H', len(rnd_data) + 3) + rnd_data
def pack_data(self, buf):
data = self.rnd_data(len(buf)) + buf
data_len = len(data) + 8
crc = binascii.crc32(struct.pack('<H', data_len)) & 0xFFFF
data = struct.pack('<H', crc) + data
data = struct.pack('<H', data_len) + data
adler32 = (zlib.adler32(data) & 0xFFFFFFFF) ^ self.pack_id
self.pack_id = (self.pack_id + 1) & 0xFFFFFFFF
data += struct.pack('<I', adler32)
return data
def pack_auth_data(self, auth_data, buf):
if len(buf) == 0:
return b''
if len(buf) > 400:
rnd_len = common.ord(os.urandom(1)[0]) % 512
else:
rnd_len = struct.unpack('<H', os.urandom(2))[0] % 1024
data = auth_data
data_len = 4 + 16 + 10 + len(buf) + rnd_len + 4
data = data + struct.pack('<H', data_len) + struct.pack('<H', rnd_len)
uid = os.urandom(4)
encryptor = encrypt.Encryptor(to_bytes(base64.b64encode(uid + self.server_info.key)) + self.salt, 'aes-128-cbc', b'\x00' * 16)
data = uid + encryptor.encrypt(data)[16:]
data += hmac.new(self.server_info.iv + self.server_info.key, data, hashlib.sha1).digest()[:10]
data += os.urandom(rnd_len) + buf
data += struct.pack('<I', (zlib.adler32(data) & 0xFFFFFFFF))
return data
def auth_data(self):
utc_time = int(time.time()) & 0xFFFFFFFF
if self.server_info.data.connection_id > 0xFF000000:
self.server_info.data.local_client_id = b''
if not self.server_info.data.local_client_id:
self.server_info.data.local_client_id = os.urandom(4)
logging.debug("local_client_id %s" % (binascii.hexlify(self.server_info.data.local_client_id),))
self.server_info.data.connection_id = struct.unpack('<I', os.urandom(4))[0] & 0xFFFFFF
self.server_info.data.connection_id += 1
return b''.join([struct.pack('<I', utc_time),
self.server_info.data.local_client_id,
struct.pack('<I', self.server_info.data.connection_id)])
def client_pre_encrypt(self, buf):
ret = b''
if not self.has_sent_header:
head_size = self.get_head_size(buf, 30)
datalen = min(len(buf), random.randint(0, 31) + head_size)
ret += self.pack_auth_data(self.auth_data(), buf[:datalen])
buf = buf[datalen:]
self.has_sent_header = True
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def client_post_decrypt(self, buf):
if self.raw_trans:
return buf
self.recv_buf += buf
out_buf = b''
while len(self.recv_buf) > 4:
crc = struct.pack('<H', binascii.crc32(self.recv_buf[:2]) & 0xFFFF)
if crc != self.recv_buf[2:4]:
raise Exception('client_post_decrypt data incorrect crc')
length = struct.unpack('<H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', (zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) ^ self.recv_id) != self.recv_buf[length - 4:length]:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data incorrect checksum')
self.recv_id = (self.recv_id + 1) & 0xFFFFFFFF
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('<H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
return out_buf
def server_pre_encrypt(self, buf):
if self.raw_trans:
return buf
ret = b''
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len])
buf = buf[self.unit_len:]
ret += self.pack_data(buf)
return ret
def server_post_decrypt(self, buf):
if self.raw_trans:
return (buf, False)
self.recv_buf += buf
out_buf = b''
sendback = False
if not self.has_recv_header:
if len(self.recv_buf) < 30:
return (b'', False)
sha1data = hmac.new(self.server_info.recv_iv + self.server_info.key, self.recv_buf[:20], hashlib.sha1).digest()[:10]
if sha1data != self.recv_buf[20:30]:
logging.error('auth_aes128 data incorrect auth HMAC-SHA1 from %s:%d, data %s' % (self.server_info.client, self.server_info.client_port, binascii.hexlify(self.recv_buf)))
if len(self.recv_buf) < 30 + self.extra_wait_size:
return (b'', False)
return self.not_match_return(self.recv_buf)
user_key = self.recv_buf[:4]
encryptor = encrypt.Encryptor(to_bytes(base64.b64encode(user_key + self.server_info.key)) + self.salt, 'aes-128-cbc')
head = encryptor.decrypt(b'\x00' * 16 + self.recv_buf[4:20] + b'\x00') # need an extra byte or recv empty
length = struct.unpack('<H', head[12:14])[0]
if len(self.recv_buf) < length:
return (b'', False)
utc_time = struct.unpack('<I', head[:4])[0]
client_id = struct.unpack('<I', head[4:8])[0]
connection_id = struct.unpack('<I', head[8:12])[0]
rnd_len = struct.unpack('<H', head[14:16])[0]
if struct.pack('<I', zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) != self.recv_buf[length - 4:length]:
logging.info('auth_aes128: checksum error, data %s' % (binascii.hexlify(self.recv_buf[:length]),))
return self.not_match_return(self.recv_buf)
time_dif = common.int32(utc_time - (int(time.time()) & 0xffffffff))
if time_dif < -self.max_time_dif or time_dif > self.max_time_dif:
logging.info('auth_aes128: wrong timestamp, time_dif %d, data %s' % (time_dif, binascii.hexlify(head),))
return self.not_match_return(self.recv_buf)
elif self.server_info.data.insert(client_id, connection_id):
self.has_recv_header = True
out_buf = self.recv_buf[30 + rnd_len:length - 4]
self.client_id = client_id
self.connection_id = connection_id
else:
logging.info('auth_aes128: auth fail, data %s' % (binascii.hexlify(out_buf),))
return self.not_match_return(self.recv_buf)
self.recv_buf = self.recv_buf[length:]
self.has_recv_header = True
sendback = True
while len(self.recv_buf) > 4:
crc = struct.pack('<H', binascii.crc32(self.recv_buf[:2]) & 0xFFFF)
if crc != self.recv_buf[2:4]:
self.raw_trans = True
logging.info('auth_aes128: wrong crc')
if self.recv_id == 0:
logging.info('auth_aes128: wrong crc')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
length = struct.unpack('<H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
if self.recv_id == 0:
logging.info('auth_aes128: over size')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
if length > len(self.recv_buf):
break
if struct.pack('<I', (zlib.adler32(self.recv_buf[:length - 4]) & 0xFFFFFFFF) ^ self.recv_id) != self.recv_buf[length - 4:length]:
logging.info('auth_aes128: checksum error, data %s' % (binascii.hexlify(self.recv_buf[:length]),))
self.raw_trans = True
self.recv_buf = b''
if self.recv_id == 0:
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data incorrect checksum')
self.recv_id = (self.recv_id + 1) & 0xFFFFFFFF
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('<H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if pos == length - 4:
sendback = True
if out_buf:
self.server_info.data.update(self.client_id, self.connection_id)
return (out_buf, sendback)
def client_udp_pre_encrypt(self, buf):
return buf + struct.pack('<I', zlib.adler32(buf) & 0xFFFFFFFF)
def client_udp_post_decrypt(self, buf):
length = len(buf)
data = buf[:-4]
if struct.pack('<I', zlib.adler32(data) & 0xFFFFFFFF) != buf[length - 4:]:
return b''
return data
def server_udp_pre_encrypt(self, buf):
return buf + struct.pack('<I', zlib.adler32(buf) & 0xFFFFFFFF)
def server_udp_post_decrypt(self, buf):
length = len(buf)
data = buf[:-4]
if struct.pack('<I', zlib.adler32(data) & 0xFFFFFFFF) != buf[length - 4:]:
return (b'', None)
return (data, None)
class obfs_auth_mu_data(object):
def __init__(self):
self.user_id = {}
self.local_client_id = b''
self.connection_id = 0
self.set_max_client(64) # max active client count
def update(self, user_id, client_id, connection_id):
if user_id not in self.user_id:
self.user_id[user_id] = lru_cache.LRUCache()
local_client_id = self.user_id[user_id]
if client_id in local_client_id:
local_client_id[client_id].update()
def set_max_client(self, max_client):
self.max_client = max_client
self.max_buffer = max(self.max_client * 2, 1024)
def insert(self, user_id, client_id, connection_id):
if user_id not in self.user_id:
self.user_id[user_id] = lru_cache.LRUCache()
local_client_id = self.user_id[user_id]
if local_client_id.get(client_id, None) is None or not local_client_id[client_id].enable:
if local_client_id.first() is None or len(local_client_id) < self.max_client:
if client_id not in local_client_id:
#TODO: check
local_client_id[client_id] = client_queue(connection_id)
else:
local_client_id[client_id].re_enable(connection_id)
return local_client_id[client_id].insert(connection_id)
if not local_client_id[local_client_id.first()].is_active():
del local_client_id[local_client_id.first()]
if client_id not in local_client_id:
#TODO: check
local_client_id[client_id] = client_queue(connection_id)
else:
local_client_id[client_id].re_enable(connection_id)
return local_client_id[client_id].insert(connection_id)
logging.warn('auth_aes128: no inactive client')
return False
else:
return local_client_id[client_id].insert(connection_id)
class auth_aes128_sha1(auth_base):
def __init__(self, method, hashfunc):
super(auth_aes128_sha1, self).__init__(method)
self.hashfunc = hashfunc
self.recv_buf = b''
self.unit_len = 8100
self.raw_trans = False
self.has_sent_header = False
self.has_recv_header = False
self.client_id = 0
self.connection_id = 0
self.max_time_dif = 60 * 60 * 24 # time dif (second) setting
self.salt = hashfunc == hashlib.md5 and b"auth_aes128_md5" or b"auth_aes128_sha1"
self.no_compatible_method = hashfunc == hashlib.md5 and "auth_aes128_md5" or 'auth_aes128_sha1'
self.extra_wait_size = struct.unpack('>H', os.urandom(2))[0] % 1024
self.pack_id = 1
self.recv_id = 1
self.user_id = None
self.user_key = None
self.last_rnd_len = 0
def init_data(self):
return obfs_auth_mu_data()
def set_server_info(self, server_info):
self.server_info = server_info
try:
max_client = int(server_info.protocol_param.split('#')[0])
except:
max_client = 64
self.server_info.data.set_max_client(max_client)
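# The next two helpers draw the random padding length by inverse-CDF sampling of
# a linear ("trapezoid") density on [0, 1); with the negative d used further down
# (-0.3 in rnd_data_len) shorter padding lengths are more likely.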
def trapezoid_random_float(self, d):
if d == 0:
return random.random()
s = random.random()
a = 1 - d
return (math.sqrt(a * a + 4 * d * s) - a) / (2 * d)
def trapezoid_random_int(self, max_val, d):
v = self.trapezoid_random_float(d)
return int(v * max_val)
def rnd_data_len(self, buf_size, full_buf_size):
rev_len = self.server_info.tcp_mss - buf_size - 9
if rev_len <= 0 or self.last_rnd_len >= self.server_info.buffer_size or full_buf_size >= self.server_info.buffer_size:
return 0
if buf_size > 900:
return struct.unpack('>H', os.urandom(2))[0] % rev_len
return self.trapezoid_random_int(rev_len, -0.3)
def rnd_data(self, buf_size, full_buf_size):
data_len = self.rnd_data_len(buf_size, full_buf_size)
if data_len < 128:
return common.chr(data_len + 1) + os.urandom(data_len)
return common.chr(255) + struct.pack('<H', data_len + 1) + os.urandom(data_len - 2)
def pack_data(self, buf, full_buf_size):
data = self.rnd_data(len(buf), full_buf_size) + buf
data_len = len(data) + 8
mac_key = self.user_key + struct.pack('<I', self.pack_id)
mac = hmac.new(mac_key, struct.pack('<H', data_len), self.hashfunc).digest()[:2]
data = struct.pack('<H', data_len) + mac + data
data += hmac.new(mac_key, data, self.hashfunc).digest()[:4]
self.pack_id = (self.pack_id + 1) & 0xFFFFFFFF
return data
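# For reference, the frame produced above is:
#   [2-byte little-endian length][2-byte HMAC tag over that length field]
#   [random pad (first byte encodes its size)][payload]
#   [4-byte HMAC tag over the frame so far], all keyed with user_key + pack_id.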
def pack_auth_data(self, auth_data, buf):
if len(buf) == 0:
return b''
if len(buf) > 400:
rnd_len = struct.unpack('<H', os.urandom(2))[0] % 512
else:
rnd_len = struct.unpack('<H', os.urandom(2))[0] % 1024
data = auth_data
data_len = 7 + 4 + 16 + 4 + len(buf) + rnd_len + 4
data = data + struct.pack('<H', data_len) + struct.pack('<H', rnd_len)
mac_key = self.server_info.iv + self.server_info.key
uid = os.urandom(4)
if b':' in to_bytes(self.server_info.protocol_param):
try:
items = to_bytes(self.server_info.protocol_param).split(b':')
self.user_key = self.hashfunc(items[1]).digest()
uid = struct.pack('<I', int(items[0]))
except:
pass
if self.user_key is None:
self.user_key = self.server_info.key
encryptor = encrypt.Encryptor(to_bytes(base64.b64encode(self.user_key)) + self.salt, 'aes-128-cbc', b'\x00' * 16)
data = uid + encryptor.encrypt(data)[16:]
data += hmac.new(mac_key, data, self.hashfunc).digest()[:4]
check_head = os.urandom(1)
check_head += hmac.new(mac_key, check_head, self.hashfunc).digest()[:6]
data = check_head + data + os.urandom(rnd_len) + buf
data += hmac.new(self.user_key, data, self.hashfunc).digest()[:4]
return data
def auth_data(self):
utc_time = int(time.time()) & 0xFFFFFFFF
if self.server_info.data.connection_id > 0xFF000000:
self.server_info.data.local_client_id = b''
if not self.server_info.data.local_client_id:
self.server_info.data.local_client_id = os.urandom(4)
logging.debug("local_client_id %s" % (binascii.hexlify(self.server_info.data.local_client_id),))
self.server_info.data.connection_id = struct.unpack('<I', os.urandom(4))[0] & 0xFFFFFF
self.server_info.data.connection_id += 1
return b''.join([struct.pack('<I', utc_time),
self.server_info.data.local_client_id,
struct.pack('<I', self.server_info.data.connection_id)])
def client_pre_encrypt(self, buf):
ret = b''
ogn_data_len = len(buf)
if not self.has_sent_header:
head_size = self.get_head_size(buf, 30)
datalen = min(len(buf), random.randint(0, 31) + head_size)
ret += self.pack_auth_data(self.auth_data(), buf[:datalen])
buf = buf[datalen:]
self.has_sent_header = True
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len], ogn_data_len)
buf = buf[self.unit_len:]
ret += self.pack_data(buf, ogn_data_len)
self.last_rnd_len = ogn_data_len
return ret
def client_post_decrypt(self, buf):
if self.raw_trans:
return buf
self.recv_buf += buf
out_buf = b''
while len(self.recv_buf) > 4:
mac_key = self.user_key + struct.pack('<I', self.recv_id)
mac = hmac.new(mac_key, self.recv_buf[:2], self.hashfunc).digest()[:2]
if mac != self.recv_buf[2:4]:
raise Exception('client_post_decrypt data incorrect mac')
length = struct.unpack('<H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data error')
if length > len(self.recv_buf):
break
if hmac.new(mac_key, self.recv_buf[:length - 4], self.hashfunc).digest()[:4] != self.recv_buf[length - 4:length]:
self.raw_trans = True
self.recv_buf = b''
raise Exception('client_post_decrypt data incorrect checksum')
self.recv_id = (self.recv_id + 1) & 0xFFFFFFFF
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('<H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
return out_buf
def server_pre_encrypt(self, buf):
if self.raw_trans:
return buf
ret = b''
ogn_data_len = len(buf)
while len(buf) > self.unit_len:
ret += self.pack_data(buf[:self.unit_len], ogn_data_len)
buf = buf[self.unit_len:]
ret += self.pack_data(buf, ogn_data_len)
self.last_rnd_len = ogn_data_len
return ret
def server_post_decrypt(self, buf):
if self.raw_trans:
return (buf, False)
self.recv_buf += buf
out_buf = b''
sendback = False
if not self.has_recv_header:
if len(self.recv_buf) >= 7 or len(self.recv_buf) in [2, 3]:
recv_len = min(len(self.recv_buf), 7)
mac_key = self.server_info.recv_iv + self.server_info.key
sha1data = hmac.new(mac_key, self.recv_buf[:1], self.hashfunc).digest()[:recv_len - 1]
if sha1data != self.recv_buf[1:recv_len]:
return self.not_match_return(self.recv_buf)
if len(self.recv_buf) < 31:
return (b'', False)
sha1data = hmac.new(mac_key, self.recv_buf[7:27], self.hashfunc).digest()[:4]
if sha1data != self.recv_buf[27:31]:
logging.error('%s data incorrect auth HMAC-SHA1 from %s:%d, data %s' % (self.no_compatible_method, self.server_info.client, self.server_info.client_port, binascii.hexlify(self.recv_buf)))
if len(self.recv_buf) < 31 + self.extra_wait_size:
return (b'', False)
return self.not_match_return(self.recv_buf)
uid = self.recv_buf[7:11]
if uid in self.server_info.users:
self.user_id = uid
self.user_key = self.hashfunc(self.server_info.users[uid]).digest()
self.server_info.update_user_func(uid)
else:
if not self.server_info.users:
self.user_key = self.server_info.key
else:
self.user_key = self.server_info.recv_iv
encryptor = encrypt.Encryptor(to_bytes(base64.b64encode(self.user_key)) + self.salt, 'aes-128-cbc')
head = encryptor.decrypt(b'\x00' * 16 + self.recv_buf[11:27] + b'\x00') # need an extra byte or recv empty
length = struct.unpack('<H', head[12:14])[0]
if len(self.recv_buf) < length:
return (b'', False)
utc_time = struct.unpack('<I', head[:4])[0]
client_id = struct.unpack('<I', head[4:8])[0]
connection_id = struct.unpack('<I', head[8:12])[0]
rnd_len = struct.unpack('<H', head[14:16])[0]
if hmac.new(self.user_key, self.recv_buf[:length - 4], self.hashfunc).digest()[:4] != self.recv_buf[length - 4:length]:
logging.info('%s: checksum error, data %s' % (self.no_compatible_method, binascii.hexlify(self.recv_buf[:length])))
return self.not_match_return(self.recv_buf)
time_dif = common.int32(utc_time - (int(time.time()) & 0xffffffff))
if time_dif < -self.max_time_dif or time_dif > self.max_time_dif:
logging.info('%s: wrong timestamp, time_dif %d, data %s' % (self.no_compatible_method, time_dif, binascii.hexlify(head)))
return self.not_match_return(self.recv_buf)
elif self.server_info.data.insert(self.user_id, client_id, connection_id):
self.has_recv_header = True
out_buf = self.recv_buf[31 + rnd_len:length - 4]
self.client_id = client_id
self.connection_id = connection_id
else:
logging.info('%s: auth fail, data %s' % (self.no_compatible_method, binascii.hexlify(out_buf)))
return self.not_match_return(self.recv_buf)
self.recv_buf = self.recv_buf[length:]
self.has_recv_header = True
sendback = True
while len(self.recv_buf) > 4:
mac_key = self.user_key + struct.pack('<I', self.recv_id)
mac = hmac.new(mac_key, self.recv_buf[:2], self.hashfunc).digest()[:2]
if mac != self.recv_buf[2:4]:
self.raw_trans = True
logging.info(self.no_compatible_method + ': wrong crc')
if self.recv_id == 0:
logging.info(self.no_compatible_method + ': wrong crc')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
length = struct.unpack('<H', self.recv_buf[:2])[0]
if length >= 8192 or length < 7:
self.raw_trans = True
self.recv_buf = b''
if self.recv_id == 0:
logging.info(self.no_compatible_method + ': over size')
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data error')
if length > len(self.recv_buf):
break
if hmac.new(mac_key, self.recv_buf[:length - 4], self.hashfunc).digest()[:4] != self.recv_buf[length - 4:length]:
logging.info('%s: checksum error, data %s' % (self.no_compatible_method, binascii.hexlify(self.recv_buf[:length])))
self.raw_trans = True
self.recv_buf = b''
if self.recv_id == 0:
return (b'E'*2048, False)
else:
raise Exception('server_post_decrypt data incorrect checksum')
self.recv_id = (self.recv_id + 1) & 0xFFFFFFFF
pos = common.ord(self.recv_buf[4])
if pos < 255:
pos += 4
else:
pos = struct.unpack('<H', self.recv_buf[5:7])[0] + 4
out_buf += self.recv_buf[pos:length - 4]
self.recv_buf = self.recv_buf[length:]
if pos == length - 4:
sendback = True
if out_buf:
self.server_info.data.update(self.user_id, self.client_id, self.connection_id)
return (out_buf, sendback)
def client_udp_pre_encrypt(self, buf):
if self.user_key is None:
if b':' in to_bytes(self.server_info.protocol_param):
try:
items = to_bytes(self.server_info.protocol_param).split(b':')  # split on bytes, matching the b':' check above
self.user_key = self.hashfunc(items[1]).digest()
self.user_id = struct.pack('<I', int(items[0]))
except:
pass
if self.user_key is None:
self.user_id = os.urandom(4)
self.user_key = self.server_info.key
buf += self.user_id
return buf + hmac.new(self.user_key, buf, self.hashfunc).digest()[:4]
def client_udp_post_decrypt(self, buf):
user_key = self.server_info.key
if hmac.new(user_key, buf[:-4], self.hashfunc).digest()[:4] != buf[-4:]:
return b''
return buf[:-4]
def server_udp_pre_encrypt(self, buf):
user_key = self.server_info.key
return buf + hmac.new(user_key, buf, self.hashfunc).digest()[:4]
def server_udp_post_decrypt(self, buf):
uid = buf[-8:-4]
if uid in self.server_info.users:
user_key = self.hashfunc(self.server_info.users[uid]).digest()
else:
uid = None
if not self.server_info.users:
user_key = self.server_info.key
else:
user_key = self.server_info.recv_iv
if hmac.new(user_key, buf[:-4], self.hashfunc).digest()[:4] != buf[-4:]:
return (b'', None)
return (buf[:-8], uid)
| 42.995946
| 204
| 0.561209
| 8,380
| 63,634
| 4.029236
| 0.037709
| 0.06255
| 0.079491
| 0.033052
| 0.896964
| 0.869688
| 0.847801
| 0.813801
| 0.788153
| 0.774204
| 0
| 0.031312
| 0.324968
| 63,634
| 1,479
| 205
| 43.025017
| 0.754743
| 0.013483
| 0
| 0.774268
| 0
| 0
| 0.049373
| 0.000751
| 0
| 0
| 0.008618
| 0.000676
| 0.00077
| 1
| 0.075501
| false
| 0.002311
| 0.014638
| 0.014638
| 0.231895
| 0.00077
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b1a9dc5da7b47ba175a74c1e0b26ccaf251664f
| 164
|
py
|
Python
|
tests/test_hello.py
|
cuencandres/minpkg
|
0779328777fd1bc32f678c5054a3c48c50ea5d8b
|
[
"MIT"
] | null | null | null |
tests/test_hello.py
|
cuencandres/minpkg
|
0779328777fd1bc32f678c5054a3c48c50ea5d8b
|
[
"MIT"
] | null | null | null |
tests/test_hello.py
|
cuencandres/minpkg
|
0779328777fd1bc32f678c5054a3c48c50ea5d8b
|
[
"MIT"
] | 1
|
2020-01-17T18:41:21.000Z
|
2020-01-17T18:41:21.000Z
|
from minpkg import say_hello, say_goodbye
def test_say_hello():
assert say_hello() == 'hello'
def test_say_goodbye():
assert say_goodbye() == 'Goodbye!!'
| 20.5
| 41
| 0.707317
| 23
| 164
| 4.695652
| 0.391304
| 0.222222
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164634
| 164
| 7
| 42
| 23.428571
| 0.788321
| 0
| 0
| 0
| 0
| 0
| 0.085366
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
6b40649b97c201deea23d0c75bea49f836ef8c05
| 15,822
|
py
|
Python
|
wo/cli/plugins/stack_services.py
|
xwang0929/WordOps
|
d608e78bebcfe3041bd86c8cc6d379e4023368a0
|
[
"MIT"
] | 1
|
2020-03-30T16:09:50.000Z
|
2020-03-30T16:09:50.000Z
|
wo/cli/plugins/stack_services.py
|
xwang0929/WordOps
|
d608e78bebcfe3041bd86c8cc6d379e4023368a0
|
[
"MIT"
] | null | null | null |
wo/cli/plugins/stack_services.py
|
xwang0929/WordOps
|
d608e78bebcfe3041bd86c8cc6d379e4023368a0
|
[
"MIT"
] | null | null | null |
import os
from cement.core.controller import CementBaseController, expose
from wo.core.logging import Log
from wo.core.services import WOService
from wo.core.variables import WOVar
class WOStackStatusController(CementBaseController):
class Meta:
label = 'stack_services'
stacked_on = 'stack'
stacked_type = 'embedded'
description = 'Check the stack status'
@expose(help="Start stack services")
def start(self):
"""Start services"""
services = []
wo_system = "/lib/systemd/system/"
pargs = self.app.pargs
if not (pargs.nginx or pargs.php or
pargs.php73 or
pargs.mysql or
pargs.redis or
pargs.fail2ban or
pargs.proftpd or
pargs.netdata):
pargs.nginx = True
pargs.php = True
pargs.mysql = True
pargs.fail2ban = True
pargs.netdata = True
if pargs.nginx:
if os.path.exists('{0}'.format(wo_system) + 'nginx.service'):
services = services + ['nginx']
else:
Log.info(self, "Nginx is not installed")
if pargs.php:
if os.path.exists('{0}'.format(wo_system) + 'php7.2-fpm.service'):
services = services + ['php7.2-fpm']
else:
Log.info(self, "PHP7.2-FPM is not installed")
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.php73:
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.mysql:
if ((WOVar.wo_mysql_host == "localhost") or
(WOVar.wo_mysql_host == "127.0.0.1")):
if os.path.exists('/etc/systemd/system/mysql.service'):
services = services + ['mysql']
else:
Log.info(self, "MySQL is not installed")
else:
Log.warn(self, "Remote MySQL found, "
"Unable to check MySQL service status")
if pargs.redis:
if os.path.exists('{0}'.format(wo_system) +
'redis-server.service'):
services = services + ['redis-server']
else:
Log.info(self, "Redis server is not installed")
if pargs.fail2ban:
if os.path.exists('{0}'.format(wo_system) + 'fail2ban.service'):
services = services + ['fail2ban']
else:
Log.info(self, "fail2ban is not installed")
# proftpd
if pargs.proftpd:
if os.path.exists('/etc/init.d/proftpd'):
services = services + ['proftpd']
else:
Log.info(self, "ProFTPd is not installed")
# netdata
if pargs.netdata:
if os.path.exists('{0}'.format(wo_system) + 'netdata.service'):
services = services + ['netdata']
else:
Log.info(self, "Netdata is not installed")
for service in services:
Log.debug(self, "Starting service: {0}".format(service))
WOService.start_service(self, service)
@expose(help="Stop stack services")
def stop(self):
"""Stop services"""
services = []
wo_system = "/lib/systemd/system/"
pargs = self.app.pargs
if not (pargs.nginx or pargs.php or
pargs.php73 or
pargs.mysql or
pargs.fail2ban or
pargs.netdata or
pargs.proftpd or
pargs.redis):
pargs.nginx = True
pargs.php = True
pargs.mysql = True
if pargs.nginx:
if os.path.exists('{0}'.format(wo_system) + 'nginx.service'):
services = services + ['nginx']
else:
Log.info(self, "Nginx is not installed")
if pargs.php:
if os.path.exists('{0}'.format(wo_system) + 'php7.2-fpm.service'):
services = services + ['php7.2-fpm']
else:
Log.info(self, "PHP7.2-FPM is not installed")
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.php73:
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.mysql:
if ((WOVar.wo_mysql_host == "localhost") or
(WOVar.wo_mysql_host == "127.0.0.1")):
if os.path.exists('/etc/systemd/system/mysql.service'):
services = services + ['mysql']
else:
Log.info(self, "MySQL is not installed")
else:
Log.warn(self, "Remote MySQL found, "
"Unable to check MySQL service status")
if pargs.redis:
if os.path.exists('{0}'.format(wo_system) +
'redis-server.service'):
services = services + ['redis-server']
else:
Log.info(self, "Redis server is not installed")
if pargs.fail2ban:
if os.path.exists('{0}'.format(wo_system) + 'fail2ban.service'):
services = services + ['fail2ban']
else:
Log.info(self, "fail2ban is not installed")
# proftpd
if pargs.proftpd:
if os.path.exists('/etc/init.d/proftpd'):
services = services + ['proftpd']
else:
Log.info(self, "ProFTPd is not installed")
# netdata
if pargs.netdata:
if os.path.exists('{0}'.format(wo_system) + 'netdata.service'):
services = services + ['netdata']
else:
Log.info(self, "Netdata is not installed")
for service in services:
Log.debug(self, "Stopping service: {0}".format(service))
WOService.stop_service(self, service)
@expose(help="Restart stack services")
def restart(self):
"""Restart services"""
services = []
wo_system = "/lib/systemd/system/"
pargs = self.app.pargs
if not (pargs.nginx or pargs.php or
pargs.php73 or
pargs.mysql or
pargs.netdata or
pargs.proftpd or
pargs.redis or
pargs.fail2ban):
pargs.nginx = True
pargs.php = True
pargs.mysql = True
pargs.netdata = True
if pargs.nginx:
if os.path.exists('{0}'.format(wo_system) + 'nginx.service'):
services = services + ['nginx']
else:
Log.info(self, "Nginx is not installed")
if pargs.php:
if os.path.exists('{0}'.format(wo_system) + 'php7.2-fpm.service'):
services = services + ['php7.2-fpm']
else:
Log.info(self, "PHP7.2-FPM is not installed")
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.php73:
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.mysql:
if ((WOVar.wo_mysql_host == "localhost") or
(WOVar.wo_mysql_host == "127.0.0.1")):
if os.path.exists('/etc/systemd/system/mysql.service'):
services = services + ['mysql']
else:
Log.info(self, "MySQL is not installed")
else:
Log.warn(self, "Remote MySQL found, "
"Unable to check MySQL service status")
if pargs.redis:
if os.path.exists('{0}'.format(wo_system) +
'redis-server.service'):
services = services + ['redis-server']
else:
Log.info(self, "Redis server is not installed")
if pargs.fail2ban:
if os.path.exists('{0}'.format(wo_system) + 'fail2ban.service'):
services = services + ['fail2ban']
else:
Log.info(self, "fail2ban is not installed")
# proftpd
if pargs.proftpd:
if os.path.exists('/etc/init.d/proftpd'):
services = services + ['proftpd']
else:
Log.info(self, "ProFTPd is not installed")
# netdata
if pargs.netdata:
if os.path.exists('{0}'.format(wo_system) + 'netdata.service'):
services = services + ['netdata']
else:
Log.info(self, "Netdata is not installed")
for service in services:
Log.debug(self, "Restarting service: {0}".format(service))
WOService.restart_service(self, service)
@expose(help="Get stack status")
def status(self):
"""Status of services"""
services = []
wo_system = "/lib/systemd/system/"
pargs = self.app.pargs
if not (pargs.nginx or pargs.php or
pargs.php73 or
pargs.mysql or
pargs.netdata or
pargs.proftpd or
pargs.redis or
pargs.fail2ban):
pargs.nginx = True
pargs.php = True
pargs.mysql = True
pargs.fail2ban = True
pargs.netdata = True
if pargs.nginx:
if os.path.exists('{0}'.format(wo_system) + 'nginx.service'):
services = services + ['nginx']
else:
Log.info(self, "Nginx is not installed")
if pargs.php:
if os.path.exists('{0}'.format(wo_system) + 'php7.2-fpm.service'):
services = services + ['php7.2-fpm']
else:
Log.info(self, "PHP7.2-FPM is not installed")
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.php73:
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.mysql:
if ((WOVar.wo_mysql_host == "localhost") or
(WOVar.wo_mysql_host == "127.0.0.1")):
if os.path.exists('/etc/systemd/system/mysql.service'):
services = services + ['mysql']
else:
Log.info(self, "MySQL is not installed")
else:
Log.warn(self, "Remote MySQL found, "
"Unable to check MySQL service status")
if pargs.redis:
if os.path.exists('{0}'.format(wo_system) +
'redis-server.service'):
services = services + ['redis-server']
else:
Log.info(self, "Redis server is not installed")
if pargs.fail2ban:
if os.path.exists('{0}'.format(wo_system) + 'fail2ban.service'):
services = services + ['fail2ban']
else:
Log.info(self, "fail2ban is not installed")
# proftpd
if pargs.proftpd:
if os.path.exists('/etc/init.d/proftpd'):
services = services + ['proftpd']
else:
Log.info(self, "ProFTPd is not installed")
# netdata
if pargs.netdata:
if os.path.exists('{0}'.format(wo_system) + 'netdata.service'):
services = services + ['netdata']
else:
Log.info(self, "Netdata is not installed")
for service in services:
if WOService.get_service_status(self, service):
Log.info(self, "{0:10}: {1}".format(service, "Running"))
@expose(help="Reload stack services")
def reload(self):
"""Reload service"""
services = []
wo_system = "/lib/systemd/system/"
pargs = self.app.pargs
if not (pargs.nginx or pargs.php or
pargs.php73 or
pargs.mysql or
pargs.netdata or
pargs.proftpd or
pargs.redis or
pargs.fail2ban):
pargs.nginx = True
pargs.php = True
pargs.mysql = True
pargs.fail2ban = True
if pargs.nginx:
if os.path.exists('{0}'.format(wo_system) + 'nginx.service'):
services = services + ['nginx']
else:
Log.info(self, "Nginx is not installed")
if pargs.php:
if os.path.exists('{0}'.format(wo_system) + 'php7.2-fpm.service'):
services = services + ['php7.2-fpm']
else:
Log.info(self, "PHP7.2-FPM is not installed")
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.php73:
if os.path.exists('{0}'.format(wo_system) + 'php7.3-fpm.service'):
services = services + ['php7.3-fpm']
else:
Log.info(self, "PHP7.3-FPM is not installed")
if pargs.mysql:
if ((WOVar.wo_mysql_host == "localhost") or
(WOVar.wo_mysql_host == "127.0.0.1")):
if os.path.exists('/etc/systemd/system/mysql.service'):
services = services + ['mysql']
else:
Log.info(self, "MySQL is not installed")
else:
Log.warn(self, "Remote MySQL found, "
"Unable to check MySQL service status")
if pargs.redis:
if os.path.exists('{0}'.format(wo_system) +
'redis-server.service'):
services = services + ['redis-server']
else:
Log.info(self, "Redis server is not installed")
if pargs.fail2ban:
if os.path.exists('{0}'.format(wo_system) + 'fail2ban.service'):
services = services + ['fail2ban']
else:
Log.info(self, "fail2ban is not installed")
# proftpd
if pargs.proftpd:
if os.path.exists('/etc/init.d/proftpd'):
services = services + ['proftpd']
else:
Log.info(self, "ProFTPd is not installed")
# netdata
if pargs.netdata:
if os.path.exists('{0}'.format(wo_system) + 'netdata.service'):
services = services + ['netdata']
else:
Log.info(self, "Netdata is not installed")
for service in services:
Log.debug(self, "Reloading service: {0}".format(service))
WOService.reload_service(self, service)
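The five commands above (start, stop, restart, status, reload) repeat the same unit-file detection block almost verbatim. Purely as an editorial aside, a minimal sketch of how that mapping could be expressed once is shown below; the table, helper name, and structure are illustrative assumptions and not part of the WordOps source.

import os

WO_SYSTEM = "/lib/systemd/system/"

# Hypothetical flag -> (unit file, service name, display label) table,
# mirroring the checks performed inline in the controller above.
SERVICE_MAP = {
    'nginx': ('nginx.service', 'nginx', 'Nginx'),
    'php': ('php7.2-fpm.service', 'php7.2-fpm', 'PHP7.2-FPM'),
    'php73': ('php7.3-fpm.service', 'php7.3-fpm', 'PHP7.3-FPM'),
    'redis': ('redis-server.service', 'redis-server', 'Redis server'),
    'fail2ban': ('fail2ban.service', 'fail2ban', 'fail2ban'),
    'netdata': ('netdata.service', 'netdata', 'Netdata'),
}


def collect_services(flags):
    """Return installed services for the requested flags, logging the rest."""
    selected = []
    for flag in flags:
        unit, service, label = SERVICE_MAP[flag]
        if os.path.exists(WO_SYSTEM + unit):
            selected.append(service)
        else:
            print("{0} is not installed".format(label))
    return selected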
| 37.140845
| 78
| 0.49861
| 1,728
| 15,822
| 4.525463
| 0.051505
| 0.044757
| 0.064706
| 0.080563
| 0.916368
| 0.886573
| 0.884015
| 0.884015
| 0.884015
| 0.874297
| 0
| 0.021437
| 0.377892
| 15,822
| 425
| 79
| 37.228235
| 0.773037
| 0.010113
| 0
| 0.907563
| 0
| 0
| 0.203686
| 0.010559
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014006
| false
| 0
| 0.014006
| 0
| 0.033613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
864d7b64cc0bbe629da71f707b1ca0deab7b6c70
| 49
|
py
|
Python
|
test/run/t115.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t115.py
|
csev/skulpt
|
9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t115.py
|
csev/skulpt
|
9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
a = [1,2,3,4,5,6]
b = [9,9,9]
a[1:2] = b
print a
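A short worked note on the snippet above: assigning a list to the slice a[1:2] splices every element of b in place of the single element at index 1, so the script prints [1, 9, 9, 9, 3, 4, 5, 6] (the bare print statement is Python 2 syntax, which this Skulpt test targets). An equivalent check in Python 3 syntax, added here only for illustration:

a = [1, 2, 3, 4, 5, 6]
b = [9, 9, 9]
a[1:2] = b
print(a)  # [1, 9, 9, 9, 3, 4, 5, 6]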
| 9.8
| 17
| 0.428571
| 17
| 49
| 1.235294
| 0.588235
| 0.190476
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289474
| 0.22449
| 49
| 4
| 18
| 12.25
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
86a415606c878336805fc01729d68d48b0dc052a
| 4,941
|
py
|
Python
|
app/test/test_config.py
|
Andrea-MariaDB/Flask-REST-Server-Repository
|
f5d1ceffeb3456b200b90e4f05d7430aa3f1eea4
|
[
"MIT"
] | 5
|
2019-09-18T11:48:16.000Z
|
2021-11-09T11:24:12.000Z
|
app/test/test_config.py
|
prakharlovesdata/vogo-hack-server
|
2ea407325b5842800f30906b23d74dd60c6de711
|
[
"MIT"
] | 11
|
2019-09-13T07:14:25.000Z
|
2021-06-02T00:54:06.000Z
|
app/test/test_config.py
|
prakharlovesdata/vogo-hack-server
|
2ea407325b5842800f30906b23d74dd60c6de711
|
[
"MIT"
] | 3
|
2019-10-11T09:36:04.000Z
|
2019-12-20T10:17:32.000Z
|
"""
Tests the app.main.config module.
"""
from flask import current_app
from flask_testing import TestCase
from manage import app
class TestBaseConfig(TestCase):
"""
Testclass for testing BaseConfig class.
This class checks if certain basic settings are
available for the app.
Following test methods are present
* test_app_is_base_config
You need not instantiate this class, the test loader
will take care of test discovery and pick all modules
in app/test that begin with `test` string.
After the modules has been discovered the runner will pick
up all the classes that inherit from the flask_testing.TestCase
which is a wrapper around unittest.TestCase.
After the test classes have been loaded the runner will
run all methods that start with test_.
"""
def create_app(self):
app.config.from_object("app.src.config.BaseConfig")
return app
def test_app_is_base_config(self):
"""
Method to test whether the base config class
functions perfectly right or not.
Asserts whether the proper values for the
defined config are set or not.
"""
self.assertIsNotNone(app.config.get("BASE_DIR"))
self.assertIsNotNone(app.config.get("SECRET_KEY"))
self.assertIsNotNone(app.config.get("ENV"))
self.assertIsNotNone(app.config.get("DEBUG"))
self.assertFalse(app.config.get("TESTING"))
class TestDevelopmentConfig(TestCase):
"""
Testclass for testing DevelopmentConfig class.
This class checks if certain basic settings are
available for the app in development environment.
Following test methods are present
* test_app_is_development
You need not instantiate this class, the test loader
will take care of test discovery and pick all modules
in app/test that begin with `test` string.
After the modules has been discovered the runner will pick
up all the classes that inherit from the flask_testing.TestCase
which is a wrapper around unittest.TestCase.
After the test classes have been loaded the runner will
run all methods that start with test_.
"""
def create_app(self):
app.config.from_object("app.src.config.DevelopmentConfig")
return app
def test_app_is_development(self):
"""
Method to test whether the development
config class functions perfectly right or not.
Asserts whether the proper values for the
defined config are set or not.
"""
self.assertIsNotNone(current_app)
class TestTestingConfig(TestCase):
"""
Testclass for testing TestingConfig class.
This class checks if certain basic settings are
available for the app in testing environment.
Following test methods are present
* test_app_is_testing
You need not instantiate this class, the test loader
will take care of test discovery and pick all modules
in app/test that begin with `test` string.
After the modules has been discovered the runner will pick
up all the classes that inherit from the flask_testing.TestCase
which is a wrapper around unittest.TestCase.
After the test classes have been loaded the runner will
run all methods that start with test_.
"""
def create_app(self):
app.config.from_object("app.src.config.TestingConfig")
return app
def test_app_is_testing(self):
"""
Method to test whether the test
config class functions perfectly right or not.
Asserts whether the proper values for the
defined config are set or not.
"""
self.assertTrue(app.config.get("TESTING"))
self.assertTrue(app.config.get("JSONIFY_PRETTYPRINT_REGULAR"))
class TestProductionConfig(TestCase):
"""
Testclass for testing ProductionConfig class.
This class checks if certain basic settings are
available for the app in production environment.
Following test methods are present
* test_app_is_production
You need not instantiate this class, the test loader
will take care of test discovery and pick all modules
in app/test that begin with `test` string.
After the modules has been discovered the runner will pick
up all the classes that inherit from the flask_testing.TestCase
which is a wrapper around unittest.TestCase.
After the test classes have been loaded the runner will
run all methods that start with test_.
"""
def create_app(self):
app.config.from_object("app.src.config.ProductionConfig")
return app
def test_app_is_production(self):
"""
Method to test whether the production
config class functions perfectly right or not.
Asserts whether the proper values for the
defined config are set or not.
"""
self.assertTrue(app.config.get("JSONIFY_PRETTYPRINT_REGULAR"))
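The docstrings above describe how these classes are meant to be picked up by a test loader rather than instantiated directly. A minimal sketch of that discovery step with the standard unittest loader is shown below; the start directory and pattern are taken from the docstrings, while the exact command this project wires into manage.py is not reproduced in this dump and is therefore an assumption.

import unittest

# Discover app/test/test*.py modules as described in the docstrings above.
loader = unittest.TestLoader()
suite = loader.discover("app/test", pattern="test*.py")
unittest.TextTestRunner(verbosity=2).run(suite)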
| 33.612245
| 70
| 0.707144
| 677
| 4,941
| 5.085672
| 0.15805
| 0.031368
| 0.020912
| 0.031368
| 0.825443
| 0.789137
| 0.734534
| 0.734534
| 0.701133
| 0.657566
| 0
| 0
| 0.240235
| 4,941
| 146
| 71
| 33.842466
| 0.917155
| 0.625987
| 0
| 0.3125
| 0
| 0
| 0.15
| 0.121429
| 0
| 0
| 0
| 0
| 0.28125
| 1
| 0.25
| false
| 0
| 0.09375
| 0
| 0.59375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86c950fbfaaa82f9e9013344b9adb307129c7287
| 10,574
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeSummary/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeSummary/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeSummary/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
"backbone_fast": False,
"bpdu_filter": False,
"bpdu_guard": False,
"bridge_assurance": True,
"configured_pathcost": {"method": "short"},
"etherchannel_misconfig_guard": True,
"extended_system_id": True,
"loop_guard": False,
"mode": {
"rapid_pvst": {
"VLAN0001": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN0115": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0116": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0118": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0119": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0121": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0180": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0501": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0502": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0503": {
"blocking": 0,
"forwarding": 3,
"learning": 0,
"listening": 0,
"stp_active": 3,
},
"VLAN0506": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0508": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0509": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0510": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0511": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0512": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0513": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0514": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0515": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0516": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0517": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0518": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0521": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0522": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0540": {
"blocking": 0,
"forwarding": 3,
"learning": 0,
"listening": 0,
"stp_active": 3,
},
"VLAN0601": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0602": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0603": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN0604": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0606": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN0701": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0801": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0802": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0803": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0804": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0805": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0806": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN0916": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1111": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1112": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1113": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1114": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1115": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1116": {
"blocking": 0,
"forwarding": 2,
"learning": 0,
"listening": 0,
"stp_active": 2,
},
"VLAN1125": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN1506": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN1509": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
"VLAN1601": {
"blocking": 0,
"forwarding": 1,
"learning": 0,
"listening": 0,
"stp_active": 1,
},
}
},
"portfast_default": False,
"pvst_simulation": True,
"pvst_simulation_status": "inactive",
"root_bridge_for": "VLAN0001, VLAN0115-VLAN0116, VLAN0118-VLAN0119, VLAN0121, VLAN0180, VLAN0501-VLAN0503, VLAN0506, VLAN0508-VLAN0518, VLAN0521-VLAN0522, VLAN0540, VLAN0601-VLAN0604, VLAN0606, VLAN0701, VLAN0801-VLAN0806, VLAN1111-VLAN1116, VLAN1506, VLAN1509, VLAN1601",
"total_statistics": {
"blockings": 0,
"forwardings": 62,
"learnings": 0,
"listenings": 0,
"num_of_vlans": 48,
"stp_actives": 62,
},
"uplink_fast": False,
}
| 29.049451
| 276
| 0.319368
| 667
| 10,574
| 4.956522
| 0.152924
| 0.130672
| 0.275862
| 0.275862
| 0.711434
| 0.711434
| 0.711434
| 0.711434
| 0.711434
| 0.711434
| 0
| 0.114568
| 0.550123
| 10,574
| 363
| 277
| 29.129477
| 0.580408
| 0
| 0
| 0.661157
| 0
| 0.002755
| 0.294023
| 0.004729
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
86f97bcb88d0db39e0c7a880a46b740ac81492d6
| 3,347
|
py
|
Python
|
tests/test_predictionquery.py
|
Duke-GCB/PredictionsDB
|
066278425890288d9e430a46096a347453301b08
|
[
"MIT"
] | null | null | null |
tests/test_predictionquery.py
|
Duke-GCB/PredictionsDB
|
066278425890288d9e430a46096a347453301b08
|
[
"MIT"
] | 57
|
2016-09-16T15:23:49.000Z
|
2021-09-07T15:20:22.000Z
|
tests/test_predictionquery.py
|
Duke-GCB/PredictionsDB
|
066278425890288d9e430a46096a347453301b08
|
[
"MIT"
] | 1
|
2016-09-09T20:03:48.000Z
|
2016-09-09T20:03:48.000Z
|
from unittest import TestCase
from pred.queries.predictionquery import PredictionQuery
QUERY_BASE = """SET search_path TO %s,public;
select
max(common_name) as common_name,
string_agg(name, '; ') as name,
case WHEN max(value) > abs(min(value)) THEN
round(max(value), 4)
ELSE
round(min(value), 4)
end as max_value,
max(chrom) as chrom,
max(strand) as strand,
max(gene_begin) as gene_begin,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred
from gene_prediction
where
gene_list = %s
and
model_name = %s
and
case strand when '+' then
int4range(gene_begin - %s, gene_begin + %s) @> int4range(start_range, end_range)
else
int4range(gene_begin - %s, gene_begin + %s) @> int4range(start_range, end_range)
end
group by gene_id
order by gene_id{}"""
GENE_LIST_FILTER_WITH_LIMIT = QUERY_BASE.format("\nlimit %s offset %s")
GENE_LIST_FILTER = QUERY_BASE.format("")
COUNT_QUERY = """SET search_path TO %s,public;
select count(*) from (
select
max(common_name) as common_name,
string_agg(name, '; ') as name,
case WHEN max(value) > abs(min(value)) THEN
round(max(value), 4)
ELSE
round(min(value), 4)
end as max_value,
max(chrom) as chrom,
max(strand) as strand,
max(gene_begin) as gene_begin,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred
from gene_prediction
where
gene_list = %s
and
model_name = %s
and
case strand when '+' then
int4range(gene_begin - %s, gene_begin + %s) @> int4range(start_range, end_range)
else
int4range(gene_begin - %s, gene_begin + %s) @> int4range(start_range, end_range)
end
group by gene_id
) as foo"""
class TestPredictionQuery(TestCase):
    def test_filter_with_limit(self):
        expected_sql = GENE_LIST_FILTER_WITH_LIMIT
        expected_params = ["hg38", "knowngene", "E2F4", "150", "250", "250", "150", "100", "200"]
        query = PredictionQuery(
            schema="hg38",
            gene_list="knowngene",
            model_name="E2F4",
            upstream="150",
            downstream="250",
            limit="100",
            offset="200",
        )
        sql, params = query.get_query_and_params()
        self.assertMultiLineEqual(expected_sql, sql)
        self.assertEqual(expected_params, params)

    def test_filter(self):
        expected_sql = GENE_LIST_FILTER
        expected_params = ["hg38", "knowngene", "E2F4", "150", "250", "250", "150"]
        query = PredictionQuery(
            schema="hg38",
            gene_list="knowngene",
            model_name="E2F4",
            upstream="150",
            downstream="250",
        )
        sql, params = query.get_query_and_params()
        self.maxDiff = None
        self.assertMultiLineEqual(expected_sql, sql)
        self.assertEqual(expected_params, params)

    def test_count(self):
        expected_sql = COUNT_QUERY
        expected_params = ["hg38", "knowngene", "E2F4", "150", "250", "250", "150"]
        query = PredictionQuery(
            schema="hg38",
            gene_list="knowngene",
            model_name="E2F4",
            upstream="150",
            downstream="250",
            count=True,
        )
        sql, params = query.get_query_and_params()
        self.maxDiff = None
        self.assertMultiLineEqual(expected_sql, sql)
        self.assertEqual(expected_params, params)
| 29.619469
| 101
| 0.650134
| 440
| 3,347
| 4.722727
| 0.188636
| 0.051973
| 0.038499
| 0.036574
| 0.854187
| 0.838787
| 0.810876
| 0.786814
| 0.771896
| 0.751203
| 0
| 0.039939
| 0.22199
| 3,347
| 112
| 102
| 29.883929
| 0.758065
| 0
| 0
| 0.77451
| 0
| 0.019608
| 0.475179
| 0.047249
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.029412
| false
| 0
| 0.019608
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8119c042a348be12973c5298ba16bff6dd97dacc
| 2,357
|
py
|
Python
|
library/cactus-test.py
|
kausalyamahadevan/knitout-frontend-py
|
1a0414a88db698383dda7e9b45ec1293948b369b
|
[
"MIT"
] | null | null | null |
library/cactus-test.py
|
kausalyamahadevan/knitout-frontend-py
|
1a0414a88db698383dda7e9b45ec1293948b369b
|
[
"MIT"
] | null | null | null |
library/cactus-test.py
|
kausalyamahadevan/knitout-frontend-py
|
1a0414a88db698383dda7e9b45ec1293948b369b
|
[
"MIT"
] | null | null | null |
import knitout
import gabrielle
k = knitout.Writer('1 2 3 4 5 6')
# for n in range(0, 121):
# k.drop(f'f{n}')
# for n in range(120, -1, -1):
# k.drop(f'b{n}')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png')
# k.write('cactus-test.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', 2)
# k.write('cactus-test-gauge2.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-waste-test.png')
# k.write('cactus-waste-test.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-4sections.png')
# k.write('cactus-4sections.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-5sections.png')
# k.write('cactus-5sections.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', 2, 1, 4)
# k.write('cactus-test-gauge2-sr4.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=2, scale=1, maxShortrowCount=1)
# k.write('cactus-test-gauge2-sr1.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=2, scale=2, maxShortrowCount=4)
# k.write('cactus-test-gauge2-scale2-sr4-2.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=1, scale=2, maxShortrowCount=4)
# k.write('cactus-test-scale2-sr4.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=1, scale=2, maxShortrowCount=4)
# k.write('cactus-test-scale2-sr4-2.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=1, scale=1, maxShortrowCount=4)
# k.write('cactus-test-sr4.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=2, scale=3.5, maxShortrowCount=4)
# k.write('cactus-test-gauge2-scale3-sr4.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus.png', gauge=1, scale=6, maxShortrowCount=4)
# k.write('cactus-test-scale6-sr4.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-boundary.png', gauge=2, scale=2, maxShortrowCount=4)
# k.write('cactus-test-gauge2-scale2-sr4-stackedCheck.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-medium.png', scale=2, maxShortrowCount=4)
# k.write('cactus-test-medium-scale2-sr6.k')
# gabrielle.shapeImgToKnitout(k, 'graphics/cactus-medium.png', gauge=2, maxShortrowCount=4, addBindoff=False, excludeCarriers=['4'])
# k.write('cactus-test-medium-gauge2-sr4-take3.k')
gabrielle.shapeImgToKnitout(k, 'graphics/cactus-medium.png', gauge=2, maxShortrowCount=4, addBindoff=False, excludeCarriers=['4'])
k.write('cactus-test-medium-gauge2-sr4-changes.k')
| 38.016129
| 132
| 0.737378
| 340
| 2,357
| 5.111765
| 0.141176
| 0.254315
| 0.264097
| 0.342348
| 0.837745
| 0.812428
| 0.68412
| 0.60702
| 0.502302
| 0.502302
| 0
| 0.039359
| 0.072974
| 2,357
| 62
| 133
| 38.016129
| 0.756064
| 0.855325
| 0
| 0
| 0
| 0
| 0.252459
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
8127fb5a6394c5b7a7a042f8224bba0bd394736f
| 458
|
py
|
Python
|
django_base_model/signals.py
|
dukic-dev/django-base-model
|
58494d4d128a8362166a58adbc557a21abdff471
|
[
"MIT"
] | null | null | null |
django_base_model/signals.py
|
dukic-dev/django-base-model
|
58494d4d128a8362166a58adbc557a21abdff471
|
[
"MIT"
] | null | null | null |
django_base_model/signals.py
|
dukic-dev/django-base-model
|
58494d4d128a8362166a58adbc557a21abdff471
|
[
"MIT"
] | null | null | null |
import django.dispatch
base_create = django.dispatch.Signal(providing_args=["obj", "user"])
base_update = django.dispatch.Signal(providing_args=["objs", "user"])
base_delete = django.dispatch.Signal(providing_args=["obj", "user"])
base_bulk_create = django.dispatch.Signal(providing_args=["objs", "user"])
base_bulk_delete = django.dispatch.Signal(providing_args=["objs", "user"])
base_bulk_update = django.dispatch.Signal(providing_args=["objs", "user"])
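A hedged usage sketch for the signals defined above: receivers attach through Django's normal dispatch machinery. The receiver function and the sender model named below are illustrative assumptions, not part of django-base-model.

from django.dispatch import receiver


@receiver(base_create)
def log_base_create(sender, obj=None, user=None, **kwargs):
    # obj and user mirror the providing_args declared above.
    print("created {0!r} on behalf of {1!r}".format(obj, user))

# The library would then emit the signal from its save path, e.g.:
# base_create.send(sender=SomeBaseModel, obj=instance, user=request.user)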
| 45.8
| 74
| 0.766376
| 60
| 458
| 5.6
| 0.233333
| 0.291667
| 0.357143
| 0.517857
| 0.916667
| 0.916667
| 0.845238
| 0.845238
| 0.291667
| 0
| 0
| 0
| 0.061135
| 458
| 9
| 75
| 50.888889
| 0.781395
| 0
| 0
| 0
| 0
| 0
| 0.100437
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8143cfd9750d7e005610f613685b07d8f6ca809d
| 135
|
py
|
Python
|
desafio/desafio107.py
|
henriquekirchheck/Curso-em-video-Python
|
1a29f68515313af85c8683f626ba35f8fcdd10e7
|
[
"MIT"
] | null | null | null |
desafio/desafio107.py
|
henriquekirchheck/Curso-em-video-Python
|
1a29f68515313af85c8683f626ba35f8fcdd10e7
|
[
"MIT"
] | null | null | null |
desafio/desafio107.py
|
henriquekirchheck/Curso-em-video-Python
|
1a29f68515313af85c8683f626ba35f8fcdd10e7
|
[
"MIT"
] | null | null | null |
from utils import moeda
print(moeda.aumentar(100, 50))
print(moeda.diminuir(100, 50))
print(moeda.dobro(100))
print(moeda.metade(100))
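The script above relies on a utils.moeda helper that is not included in this dump. Based only on how it is called (aumentar(100, 50), diminuir(100, 50), dobro(100), metade(100)), a plausible percentage-based implementation would look like the sketch below; treat every function body here as an assumption rather than the author's original module.

# utils/moeda.py -- assumed implementation, for illustration only.
def aumentar(preco, taxa):
    return preco + (preco * taxa / 100)


def diminuir(preco, taxa):
    return preco - (preco * taxa / 100)


def dobro(preco):
    return preco * 2


def metade(preco):
    return preco / 2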
| 22.5
| 30
| 0.762963
| 22
| 135
| 4.681818
| 0.5
| 0.38835
| 0.194175
| 0.291262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128
| 0.074074
| 135
| 6
| 31
| 22.5
| 0.696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0.8
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d4c63427c2510b8eb50620988339aa4b3284f4ec
| 838
|
py
|
Python
|
api/lib/decorator.py
|
lilixiang/cmdb
|
d60857c26b9b81c8a33b72548b637cbde8782fe1
|
[
"MIT"
] | 1
|
2020-02-15T00:13:45.000Z
|
2020-02-15T00:13:45.000Z
|
api/lib/decorator.py
|
lilixiang/cmdb
|
d60857c26b9b81c8a33b72548b637cbde8782fe1
|
[
"MIT"
] | 1
|
2019-11-19T13:52:16.000Z
|
2019-11-19T13:52:16.000Z
|
api/lib/decorator.py
|
lilixiang/cmdb
|
d60857c26b9b81c8a33b72548b637cbde8782fe1
|
[
"MIT"
] | 1
|
2019-10-31T07:55:20.000Z
|
2019-10-31T07:55:20.000Z
|
# -*- coding:utf-8 -*-
from functools import wraps

from flask import abort
from flask import request


def kwargs_required(*required_args):
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for arg in required_args:
                if arg not in kwargs:
                    return abort(400, "Argument <{0}> is required".format(arg))
            return func(*args, **kwargs)

        return wrapper

    return decorate


def args_required(*required_args):
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for arg in required_args:
                if arg not in request.values:
                    return abort(400, "Argument <{0}> is required".format(arg))
            return func(*args, **kwargs)

        return wrapper

    return decorate
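A hedged usage sketch for the two decorators above; the Flask app, route, and query parameter name are illustrative assumptions, not part of the cmdb codebase.

from flask import Flask, jsonify, request

app = Flask(__name__)


@app.route("/hosts")
@args_required("hostname")
def list_hosts():
    # Reaching this line means ?hostname=... was supplied; otherwise
    # args_required has already aborted the request with HTTP 400.
    return jsonify(hostname=request.values["hostname"])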
| 23.277778
| 79
| 0.576372
| 98
| 838
| 4.867347
| 0.295918
| 0.100629
| 0.062893
| 0.096436
| 0.767296
| 0.767296
| 0.767296
| 0.767296
| 0.767296
| 0.767296
| 0
| 0.015873
| 0.323389
| 838
| 35
| 80
| 23.942857
| 0.825397
| 0.023866
| 0
| 0.695652
| 0
| 0
| 0.063725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26087
| false
| 0
| 0.130435
| 0
| 0.73913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d4ce69bc41a23da536c0b01ac97cba4bb837c83d
| 103
|
py
|
Python
|
docs/source/examples/test_func.py
|
harupy/sphinx-plotly-directive
|
1a95a2c5dde0ff177647fb3f5aecb9b86c29a91c
|
[
"MIT"
] | 12
|
2020-10-21T13:18:19.000Z
|
2022-01-21T13:44:31.000Z
|
docs/source/examples/test_func.py
|
harupy/sphinx-plotly-directive
|
1a95a2c5dde0ff177647fb3f5aecb9b86c29a91c
|
[
"MIT"
] | 1
|
2020-10-19T15:04:18.000Z
|
2020-10-24T08:19:56.000Z
|
docs/source/examples/test_func.py
|
harupy/sphinx-plotly-directive
|
1a95a2c5dde0ff177647fb3f5aecb9b86c29a91c
|
[
"MIT"
] | null | null | null |
import plotly.express as px


def func():
    return px.scatter(x=[0, 1, 2, 3, 4], y=[0, 1, 4, 9, 16])
| 17.166667
| 60
| 0.563107
| 22
| 103
| 2.636364
| 0.818182
| 0.068966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1375
| 0.223301
| 103
| 5
| 61
| 20.6
| 0.5875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d4d28fce7617d11532d69066dbfb0b649a1c1bb7
| 2,570
|
py
|
Python
|
switchport.py
|
sekozzi/switchport-python
|
2db921d0a64f16a441da237ab0be92e7cc98d344
|
[
"Apache-2.0"
] | null | null | null |
switchport.py
|
sekozzi/switchport-python
|
2db921d0a64f16a441da237ab0be92e7cc98d344
|
[
"Apache-2.0"
] | null | null | null |
switchport.py
|
sekozzi/switchport-python
|
2db921d0a64f16a441da237ab0be92e7cc98d344
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
import os, re, sys, subprocess, shlex

versioncheck = sys.version_info[0]
interface = str(sys.argv[1])


def switchportV2(interface):
    cmd = "tcpdump -nn -v -i {0} -s 1500 -c 1 'ether[20:2] == 0x2000'".format(interface)
    p1 = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p = subprocess.Popen(['egrep', 'Device-ID|Port-ID|VLAN'], stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    outs, err = p.communicate()
    outs = outs.split('\n')
    out = []
    for line in outs:
        out.append(line)
    # Get the switch information and parse it.
    switch = str(out[0])
    switch = re.findall("[A-Za-z-]+[0-9]+[A-Z]+[0-9]+", switch)
    switch = switch[0]
    # Get the port information and parse it.
    port = str(out[1])
    port = re.findall("[A-z-]+[0-9]+[[0-9]+[\W]+[0-9]+[\W]+[0-9]+", port)
    if len(port) == 0:
        port = str(out[1])
        port = re.findall("[A-z]+[0-9]+[\W]+[0-9]+", port)
    port = port[0]
    # Get the VLAN information and parse it.
    vlan = str(out[2])
    vlan = re.findall("[0-9]+", vlan)
    vlan = vlan[3]
    print('Switch: ' + switch)
    print('Port: ' + port)
    print('Vlan: ' + vlan)


def switchportV3(interface):
    cmd = "tcpdump -nn -v -i {0} -s 1500 -c 1 'ether[20:2] == 0x2000'".format(interface)
    p1 = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p = subprocess.Popen(['egrep', 'Device-ID|Port-ID|VLAN'], stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    p1.stdout.close()
    outs, err = p.communicate()
    outs = outs.split(b'\n')
    out = []
    for line in outs:
        out.append(line)
    # Get the switch information and parse it.
    switch = str(out[0])
    switch = re.findall("[A-Za-z-]+[0-9]+[A-Z]+[0-9]+", switch)
    switch = switch[0]
    # Get the port information and parse it.
    port = str(out[1])
    port = re.findall("[A-z-]+[0-9]+[[0-9]+[\W]+[0-9]+[\W]+[0-9]+", port)
    if len(port) == 0:
        port = str(out[1])
        port = re.findall("[A-z]+[0-9]+[\W]+[0-9]+", port)
    port = port[0]
    # Get the VLAN information and parse it.
    vlan = str(out[2])
    vlan = re.findall("[0-9]+", vlan)
    vlan = vlan[3]
    print('Switch: ' + switch)
    print('Port: ' + port)
    print('Vlan: ' + vlan)


if versioncheck == 2:
    switchportV2(interface)
elif versioncheck == 3:
    switchportV3(interface)
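Invocation is positional, as the sys.argv[1] read above implies: something like sudo python switchport.py eth0 (the interface name is illustrative, and tcpdump generally needs root privileges). The script then waits for a single CDP frame on that interface and prints the advertised switch name, port, and VLAN.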
| 33.376623
| 126
| 0.587938
| 382
| 2,570
| 3.95288
| 0.206806
| 0.023841
| 0.015894
| 0.015894
| 0.822517
| 0.815894
| 0.815894
| 0.815894
| 0.815894
| 0.815894
| 0
| 0.046397
| 0.211673
| 2,570
| 76
| 127
| 33.815789
| 0.698914
| 0.131128
| 0
| 0.807018
| 0
| 0.070175
| 0.185085
| 0.103324
| 0
| 0
| 0.005391
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.017544
| 0
| 0.052632
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4f4584a62463f8463454c770280c5e15553b569
| 8,157
|
py
|
Python
|
tests/examples/minlplib/nvs23.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 2
|
2021-07-03T13:19:10.000Z
|
2022-02-06T10:48:13.000Z
|
tests/examples/minlplib/nvs23.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | 1
|
2021-07-04T14:52:14.000Z
|
2021-07-15T10:17:11.000Z
|
tests/examples/minlplib/nvs23.py
|
ouyang-w-19/decogo
|
52546480e49776251d4d27856e18a46f40c824a1
|
[
"MIT"
] | null | null | null |
# MINLP written by GAMS Convert at 04/21/18 13:52:43
#
# Equation counts
# Total E G L N X C B
# 10 1 9 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 10 1 0 9 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 91 1 90 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.i1 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i2 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i3 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i4 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i5 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i6 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i7 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i8 = Var(within=Integers,bounds=(0,200),initialize=100)
m.i9 = Var(within=Integers,bounds=(0,200),initialize=100)
m.obj = Objective(expr=7*m.i1**2 + 6*m.i2**2 + 24.4*m.i1 - 0.2*m.i2 + 8*m.i3**2 - 6*m.i3*m.i1 + 4*m.i3*m.i2 + m.i3 + 6*
m.i4**2 + 2*m.i4*m.i1 + 2*m.i4*m.i3 - 39.2*m.i4 + 7*m.i5**2 - 4*m.i5*m.i1 - 2*m.i5*m.i2 - 6*m.i5*
m.i3 - 118.4*m.i5 + 4*m.i6**2 + 2*m.i6*m.i1 - 4*m.i6*m.i2 - 4*m.i6*m.i3 - 2*m.i6*m.i4 + 6*m.i6*
m.i5 - 73*m.i6 + 6*m.i7**2 - 2*m.i7*m.i1 - 6*m.i7*m.i2 - 2*m.i7*m.i3 + 4*m.i7*m.i5 + 4*m.i7*m.i6
- 110.8*m.i7 + 7*m.i8**2 - 4*m.i8*m.i1 - 2*m.i8*m.i2 + 6*m.i8*m.i3 + 4*m.i8*m.i4 - 4*m.i8*m.i5
- 2*m.i8*m.i6 + 4*m.i8*m.i7 - 17.8*m.i8 + 8*m.i9**2 - 2*m.i9*m.i1 - 4*m.i9*m.i2 + 4*m.i9*m.i3 +
4*m.i9*m.i4 - 4*m.i9*m.i5 - 4*m.i9*m.i6 + 8*m.i9*m.i7 + 4*m.i9*m.i8 - 29.4*m.i9, sense=minimize)
m.c1 = Constraint(expr=(-9*m.i1**2) - 10*m.i1*m.i2 - 8*m.i2**2 - 5*m.i3**2 - 6*m.i3*m.i1 - 10*m.i3*m.i2 - 7*m.i4**2 - 10
*m.i4*m.i1 - 6*m.i4*m.i2 - 2*m.i4*m.i3 - 2*m.i5*m.i2 - 7*m.i5**2 - 6*m.i6*m.i1 - 2*m.i6*m.i2 - 2*
m.i6*m.i4 - 5*m.i6**2 + 6*m.i7*m.i1 + 2*m.i7*m.i2 + 4*m.i7*m.i3 + 2*m.i7*m.i4 - 4*m.i7*m.i5 + 4*
m.i7*m.i6 - 8*m.i7**2 - 2*m.i8*m.i1 - 8*m.i8*m.i2 - 2*m.i8*m.i3 + 6*m.i8*m.i5 - 2*m.i8*m.i7 - 6*
m.i8**2 + 2*m.i9*m.i3 - 4*m.i9*m.i4 + 8*m.i9*m.i5 + 4*m.i9*m.i6 - 6*m.i9*m.i8 - 6*m.i9**2
>= -1850)
m.c2 = Constraint(expr=(-6*m.i1**2) - 8*m.i1*m.i2 - 6*m.i2**2 - 4*m.i3**2 - 2*m.i3*m.i1 - 2*m.i3*m.i2 - 8*m.i4**2 + 2*
m.i4*m.i1 + 10*m.i4*m.i2 - 2*m.i5*m.i1 - 6*m.i5*m.i2 + 6*m.i5*m.i4 + 7*m.i5**2 - 2*m.i6*m.i2 + 8*
m.i6*m.i3 + 2*m.i6*m.i4 - 4*m.i6*m.i5 - 8*m.i6**2 - 6*m.i7*m.i1 - 10*m.i7*m.i2 - 2*m.i7*m.i3 + 10
*m.i7*m.i4 - 10*m.i7*m.i5 - 8*m.i7**2 - 2*m.i8*m.i1 - 4*m.i8*m.i2 - 2*m.i8*m.i3 - 8*m.i8*m.i5 - 8
*m.i8*m.i7 - 5*m.i8**2 - 2*m.i9*m.i1 - 2*m.i9*m.i2 + 4*m.i9*m.i6 + 2*m.i9*m.i7 - 6*m.i9**2
>= -3170)
m.c3 = Constraint(expr=(-9*m.i1**2) - 6*m.i2**2 - 8*m.i3**2 + 2*m.i2*m.i1 + 2*m.i3*m.i2 - 6*m.i4**2 + 4*m.i4*m.i1 + 4*
m.i4*m.i2 - 2*m.i4*m.i3 - 6*m.i5*m.i1 - 2*m.i5*m.i2 + 4*m.i5*m.i4 + 6*m.i5**2 + 2*m.i6*m.i1 + 4*
m.i6*m.i2 - 6*m.i6*m.i4 - 2*m.i6*m.i5 - 5*m.i6**2 + 2*m.i7*m.i2 - 4*m.i7*m.i3 - 6*m.i7*m.i5 - 4*
m.i7*m.i6 - 7*m.i7**2 - 2*m.i8*m.i1 + 4*m.i8*m.i3 + 2*m.i8*m.i4 - 4*m.i8**2 + 10*m.i9*m.i1 + 6*
m.i9*m.i2 - 4*m.i9*m.i3 - 10*m.i9*m.i4 + 8*m.i9*m.i5 - 6*m.i9*m.i6 - 2*m.i9*m.i7 - 8*m.i9**2
>= -1770)
m.c4 = Constraint(expr=(-8*m.i1**2) - 4*m.i2**2 - 9*m.i3**2 - 7*m.i4**2 - 2*m.i2*m.i1 - 2*m.i3*m.i1 - 4*m.i3*m.i2 + 6*
m.i4*m.i1 + 2*m.i4*m.i2 - 2*m.i4*m.i3 - 6*m.i5*m.i1 - 4*m.i5*m.i2 - 2*m.i5*m.i3 + 6*m.i5*m.i4 + 6
*m.i5**2 - 10*m.i6*m.i1 - 10*m.i6*m.i3 + 4*m.i6*m.i4 - 2*m.i6*m.i5 - 7*m.i6**2 + 6*m.i7*m.i1 - 2*
m.i7*m.i2 - 2*m.i7*m.i3 + 6*m.i7*m.i5 + 2*m.i7*m.i6 - 6*m.i7**2 + 4*m.i8*m.i1 - 4*m.i8*m.i2 + 2*
m.i8*m.i3 - 4*m.i8*m.i4 - 4*m.i8*m.i5 + 8*m.i8*m.i6 + 6*m.i8*m.i6 - 8*m.i8**2 - 4*m.i9*m.i1 + 4*
m.i9*m.i2 + 6*m.i9*m.i3 - 2*m.i9*m.i4 + 2*m.i9*m.i6 + 8*m.i9*m.i7 - 4*m.i9*m.i8 - 10*m.i9**2
>= -1460)
m.c5 = Constraint(expr=2*m.i2*m.i1 - 4*m.i1**2 - 5*m.i2**2 - 6*m.i3*m.i1 - 8*m.i3**2 - 2*m.i4*m.i1 + 6*m.i4*m.i2 - 2*
m.i4*m.i3 - 6*m.i4**2 - 4*m.i5*m.i1 + 2*m.i5*m.i2 - 6*m.i5*m.i3 - 8*m.i5*m.i4 - 7*m.i5**2 + 4*
m.i6*m.i1 - 4*m.i6*m.i2 + 6*m.i6*m.i3 + 4*m.i6*m.i5 - 7*m.i6**2 + 4*m.i7*m.i1 - 4*m.i7*m.i2 - 4*
m.i7*m.i3 + 4*m.i7*m.i4 + 4*m.i7*m.i5 + 4*m.i7*m.i6 - 8*m.i7**2 - 2*m.i8*m.i1 + 4*m.i8*m.i4 + 2*
m.i8*m.i6 + 2*m.i8*m.i7 - 4*m.i8**2 - 2*m.i9*m.i2 + 4*m.i9*m.i3 + 4*m.i9*m.i4 - 2*m.i9*m.i5 + 2*
m.i9*m.i6 + 6*m.i9*m.i7 - 6*m.i9*m.i8 - 7*m.i9**2 >= -1140)
m.c6 = Constraint(expr=2*m.i2*m.i1 - 7*m.i1**2 - 7*m.i2**2 - 6*m.i3*m.i1 - 2*m.i3*m.i2 - 6*m.i3**2 - 2*m.i4*m.i1 + 2*
m.i4*m.i2 - 2*m.i4*m.i3 - 5*m.i4**2 - 2*m.i5*m.i1 - 4*m.i5*m.i3 + 2*m.i5*m.i4 - 5*m.i5**2 + 2*
m.i6*m.i1 - 4*m.i6*m.i2 + 4*m.i6*m.i3 + 2*m.i6*m.i4 + 6*m.i6*m.i5 - 9*m.i6**2 + 4*m.i7*m.i2 - 4*
m.i7*m.i3 + 4*m.i7*m.i4 - 4*m.i7*m.i5 + 8*m.i7*m.i6 - 5*m.i7**2 + 4*m.i8*m.i1 + 8*m.i8*m.i2 + 2*
m.i8*m.i3 - 4*m.i8*m.i4 - 2*m.i8*m.i5 + 4*m.i8*m.i6 - 9*m.i8**2 - 4*m.i9*m.i1 + 2*m.i9*m.i4 + 6*
m.i9*m.i5 - 4*m.i9*m.i6 - 2*m.i9*m.i7 + 2*m.i9*m.i8 - 6*m.i9**2 >= -940)
m.c7 = Constraint(expr=(-9*m.i1**2) - 4*m.i2*m.i1 - 8*m.i2**2 + 4*m.i3*m.i1 + 2*m.i3*m.i2 - 7*m.i3**2 + 4*m.i4*m.i1 + 4*
m.i4*m.i3 - 7*m.i4**2 - 2*m.i5*m.i1 - 12*m.i5*m.i2 - 4*m.i5*m.i3 - 8*m.i5**2 - 8*m.i6*m.i1 + 2*
m.i6*m.i2 - 2*m.i6*m.i5 - 6*m.i6**2 - 4*m.i7*m.i1 - 6*m.i7*m.i2 - 2*m.i7*m.i3 + 10*m.i7*m.i4 - 2*
m.i7*m.i5 + 2*m.i7*m.i6 - 7*m.i7**2 - 2*m.i8*m.i1 + 2*m.i8*m.i2 + 2*m.i8*m.i3 + 2*m.i8*m.i4 - 6*
m.i8*m.i6 - 2*m.i8*m.i7 - 6*m.i8**2 + 4*m.i9*m.i1 + 2*m.i9*m.i2 + 4*m.i9*m.i3 + 4*m.i9*m.i4 + 2*
m.i9*m.i5 - 2*m.i9*m.i6 - 8*m.i9**2 >= -2720)
m.c8 = Constraint(expr=4*m.i2*m.i1 - 7*m.i1**2 - 8*m.i2**2 + 4*m.i3*m.i1 - 8*m.i3**2 + 4*m.i4*m.i1 + 8*m.i4*m.i2 - 6*
m.i4*m.i3 - 7*m.i4**2 - 2*m.i5*m.i2 + 2*m.i5*m.i4 - 5*m.i5**2 - 2*m.i6*m.i1 - 2*m.i6*m.i2 + 4*
m.i6*m.i4 - 4*m.i6*m.i5 - 7*m.i6**2 - 2*m.i7*m.i1 + 8*m.i7*m.i2 - 2*m.i7*m.i3 - 2*m.i7*m.i4 + 6*
m.i7*m.i5 + 2*m.i7*m.i6 - 7*m.i7**2 + 2*m.i8*m.i1 - 6*m.i8*m.i2 + 6*m.i8*m.i3 + 4*m.i8*m.i4 + 2*
m.i8*m.i5 - 4*m.i8*m.i6 - 6*m.i8**2 + 4*m.i9*m.i1 - 6*m.i9*m.i2 + 2*m.i9*m.i3 - 2*m.i9*m.i4 + 2*
m.i9*m.i5 + 6*m.i9*m.i6 + 2*m.i9*m.i7 - 4*m.i9*m.i8 - 6*m.i9**2 >= -870)
m.c9 = Constraint(expr=2*m.i2*m.i1 - 4*m.i1**2 - 7*m.i2**2 + 8*m.i3*m.i1 - 4*m.i3*m.i2 - 9*m.i3**2 - 2*m.i4*m.i1 - 4*
m.i4*m.i2 - 2*m.i4*m.i3 - 6*m.i4**2 + 4*m.i5*m.i1 + 2*m.i5*m.i2 + 4*m.i5*m.i3 + 6*m.i5*m.i4 - 6*
m.i5**2 + 4*m.i6*m.i3 - 6*m.i6*m.i4 - 7*m.i6**2 - 2*m.i7*m.i2 - 4*m.i7*m.i3 + 4*m.i7*m.i5 + 8*
m.i7*m.i6 - 7*m.i7**2 + 2*m.i8*m.i2 - 4*m.i8*m.i3 + 2*m.i8*m.i4 + 2*m.i8*m.i5 + 6*m.i8*m.i7 - 7*
m.i8**2 + 4*m.i9*m.i1 + 2*m.i9*m.i2 - 10*m.i9*m.i3 + 2*m.i9*m.i5 + 2*m.i9*m.i6 - 8*m.i9*m.i8 - 6*
m.i9**2 >= -670)
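A hedged note on running the model above: it is an integer program with quadratic objective and quadratic constraints, so it needs a MINLP-capable solver. The solver name in the sketch below is an assumption; any MINLP solver registered with Pyomo's SolverFactory could be substituted.

# Illustrative solve step; 'bonmin' is an assumed solver choice.
from pyomo.environ import SolverFactory

results = SolverFactory('bonmin').solve(m, tee=True)
m.display()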
| 76.95283
| 120
| 0.420988
| 2,078
| 8,157
| 1.652551
| 0.053417
| 0.069889
| 0.074549
| 0.036401
| 0.830227
| 0.812464
| 0.774316
| 0.743739
| 0.654921
| 0.426034
| 0
| 0.242354
| 0.314576
| 8,157
| 105
| 121
| 77.685714
| 0.371848
| 0.083364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013699
| 0
| 0.013699
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be141fc6db8a847177491010e9f5e30f868d6a1b
| 58,565
|
py
|
Python
|
test/integration/component/test_vpc_routers.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | 2
|
2015-05-19T05:04:30.000Z
|
2016-09-07T00:33:17.000Z
|
test/integration/component/test_vpc_routers.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | null | null | null |
test/integration/component/test_vpc_routers.py
|
lafferty/cshv3
|
ee0ff7ac240bd24e19db6bd3fb9869dd087442ba
|
[
"Apache-2.0"
] | 2
|
2017-07-07T14:49:03.000Z
|
2018-07-31T06:38:42.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Component tests for VPC - Router Operations
"""
#Import Local Modules
import marvin
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from marvin.remoteSSHClient import remoteSSHClient
import datetime
class Services:
"""Test VPC Router services
"""
def __init__(self):
self.services = {
"account": {
"email": "test@test.com",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "password",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
"memory": 128,
},
"service_offering_new": {
"name": "Small Instance",
"displaytext": "Small Instance",
"cpunumber": 1,
"cpuspeed": 100,
"memory": 256,
"issystem": 'true',
},
"network_offering": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Vpn": 'VpcVirtualRouter',
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"Lb": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"network_offering_no_lb": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Vpn": 'VpcVirtualRouter',
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"vpc_offering": {
"name": 'VPC off',
"displaytext": 'VPC off',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
},
"vpc": {
"name": "TestVPC",
"displaytext": "TestVPC",
"cidr": '10.0.0.1/24'
},
"network": {
"name": "Test Network",
"displaytext": "Test Network",
"netmask": '255.255.255.0'
},
"lbrule": {
"name": "SSH",
"alg": "leastconn",
# Algorithm used for load balancing
"privateport": 22,
"publicport": 2222,
"openfirewall": False,
"startport": 2222,
"endport": 2222,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"natrule": {
"privateport": 22,
"publicport": 22,
"startport": 22,
"endport": 22,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"fw_rule": {
"startport": 1,
"endport": 6000,
"cidr": '0.0.0.0/0',
# Any network (For creating FW rule)
"protocol": "TCP"
},
"http_rule": {
"startport": 80,
"endport": 80,
"cidrlist": '0.0.0.0/0',
"protocol": "TCP"
},
"virtual_machine": {
"displayname": "Test VM",
"username": "root",
"password": "password",
"ssh_port": 22,
"hypervisor": 'XenServer',
# Hypervisor type should be same as
# hypervisor type of cluster
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"ostype": 'CentOS 5.3 (64-bit)',
# Cent OS 5.3 (64 bit)
"sleep": 60,
"timeout": 10,
"mode": 'advanced'
}
class TestVPCRoutersBasic(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.apiclient = super(
TestVPCRoutersBasic,
cls
).getClsTestClient().getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient, cls.services)
cls.zone = get_zone(cls.apiclient, cls.services)
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offering"]
)
cls.vpc_off = VpcOffering.create(
cls.apiclient,
cls.services["vpc_offering"]
)
cls.vpc_off.update(cls.apiclient, state='Enabled')
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
admin=True,
domainid=cls.domain.id
)
cls._cleanup = [cls.account]
cls._cleanup.append(cls.vpc_off)
#cls.debug("Enabling the VPC offering created")
cls.vpc_off.update(cls.apiclient, state='Enabled')
#cls.debug("creating a VPC network in the account: %s" %
# cls.account.name)
cls.services["vpc"]["cidr"] = '10.1.1.1/16'
cls.vpc = VPC.create(
cls.apiclient,
cls.services["vpc"],
vpcofferingid=cls.vpc_off.id,
zoneid=cls.zone.id,
account=cls.account.name,
domainid=cls.account.domainid
)
cls._cleanup.append(cls.service_offering)
return
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.apiclient, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
return
def tearDown(self):
return
def validate_vpc_offering(self, vpc_offering):
"""Validates the VPC offering"""
self.debug("Check if the VPC offering is created successfully?")
vpc_offs = VpcOffering.list(
self.apiclient,
id=vpc_offering.id
)
self.assertEqual(
isinstance(vpc_offs, list),
True,
"List VPC offerings should return a valid list"
)
self.assertEqual(
vpc_offering.name,
vpc_offs[0].name,
"Name of the VPC offering should match with listVPCOff data"
)
self.debug(
"VPC offering is created successfully - %s" %
vpc_offering.name)
return
def validate_vpc_network(self, network, state=None):
"""Validates the VPC network"""
self.debug("Check if the VPC network is created successfully?")
vpc_networks = VPC.list(
self.apiclient,
id=network.id
)
self.assertEqual(
isinstance(vpc_networks, list),
True,
"List VPC network should return a valid list"
)
self.assertEqual(
network.name,
vpc_networks[0].name,
"Name of the VPC network should match with listVPC data"
)
if state:
self.assertEqual(
vpc_networks[0].state,
state,
"VPC state should be '%s'" % state
)
self.debug("VPC network validated - %s" % network.name)
return
def migrate_router(self, router):
""" Migrate the router """
self.debug("Checking if the host is available for migration?")
hosts = Host.list(self.apiclient, zoneid=self.zone.id, type='Routing')
self.assertEqual(
isinstance(hosts, list),
True,
"List hosts should return a valid list"
)
if len(hosts) < 2:
raise unittest.SkipTest(
"No host available for migration. Test requires atleast 2 hosts")
# Remove the host of current VM from the hosts list
hosts[:] = [host for host in hosts if host.id != router.hostid]
host = hosts[0]
self.debug("Validating if the network rules work properly or not?")
self.debug("Migrating VM-ID: %s from %s to Host: %s" % (
router.id,
router.hostid,
host.id
))
try:
#Migrate the router
cmd = migrateSystemVm.migrateSystemVmCmd()
cmd.isAsync = "false"
cmd.hostid = host.id
cmd.virtualmachineid = router.id
self.apiclient.migrateSystemVm(cmd)
except Exception as e:
self.fail("Failed to migrate instance, %s" % e)
self.debug("Waiting for Router mgiration ....")
time.sleep(240)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
router.hostid = router_response[0].hostid
self.assertEqual(router.hostid, host.id, "Migration to host %s failed. The router host is"
" still %s" % (host.id, router.hostid))
return
@attr(tags=["advanced", "intervlan"])
def test_01_stop_start_router_after_creating_vpc(self):
""" Test to stop and start router after creation of VPC
"""
# Validate following:
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Stop the VPC Virtual Router which is created as a result of VPC creation.
# 3. Start the Stopped VPC Virtual Router
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
# Stop the VPC Router
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
router = routers[0]
self.debug("Stopping the router with ID: %s" % router.id)
#Stop the router
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in stopped state
self.assertEqual(
router_response[0].state,
'Stopped',
"Check list router response for router state"
)
self.debug("Stopped the router with ID: %s" % router.id)
# Start The Router
self.debug("Starting the router with ID: %s" % router.id)
cmd = startRouter.startRouterCmd()
cmd.id = router.id
self.apiclient.startRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in running state
self.assertEqual(
router_response[0].state,
'Running',
"Check list router response for router state"
)
self.debug("Started the router with ID: %s" % router.id)
return
@attr(tags=["advanced", "intervlan"])
def test_02_reboot_router_after_creating_vpc(self):
""" Test to reboot the router after creating a VPC
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Reboot the VPC Virtual Router which is created as a result of VPC creation.
# Stop the VPC Router
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
router = routers[0]
self.debug("Rebooting the router ...")
#Reboot the router
cmd = rebootRouter.rebootRouterCmd()
cmd.id = router.id
self.apiclient.rebootRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in running state and same public IP
self.assertEqual(
router_response[0].state,
'Running',
"Check list router response for router state"
)
return
@attr(tags=["advanced", "intervlan"])
def test_03_migrate_router_after_creating_vpc(self):
""" Test migration of router to another host after creating VPC """
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
self.migrate_router(routers[0])
return
@attr(tags=["advanced", "intervlan"])
def test_04_change_service_offerring_vpc(self):
""" Tests to change service offering of the Router after
creating a vpc
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Change the service offerings of the VPC Virtual Router which is created as a result of VPC creation.
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
#Stop the router
router = routers[0]
self.debug("Stopping the router with ID: %s" % router.id)
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offering_new"]
)
self.debug("Changing service offering for the Router %s" % router.id)
try:
router = Router.change_service_offering(self.apiclient,
router.id,
service_offering.id
)
except Exception as e:
self.fail("Changing service offering failed: %s" % e)
self.debug("Router %s" % router)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
router = routers[0]
self.assertEqual(
router.serviceofferingid,
service_offering.id,
"Changing service offering failed as id is %s and expected"
"is %s" % (router.serviceofferingid, service_offering.id)
)
return
@attr(tags=["advanced", "intervlan"])
def test_05_destroy_router_after_creating_vpc(self):
""" Test to destroy the router after creating a VPC
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Destroy the VPC Virtual Router which is created as a result of VPC creation.
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
Router.destroy(self.apiclient,
id=routers[0].id
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
False,
"List Routers should be empty"
)
return
class TestVPCRouterOneNetwork(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.apiclient = super(
TestVPCRouterOneNetwork,
cls
).getClsTestClient().getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient, cls.services)
cls.zone = get_zone(cls.apiclient, cls.services)
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offering"]
)
cls.vpc_off = VpcOffering.create(
cls.apiclient,
cls.services["vpc_offering"]
)
cls.vpc_off.update(cls.apiclient, state='Enabled')
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
admin=True,
domainid=cls.domain.id
)
cls._cleanup = [cls.account]
cls.services["vpc"]["cidr"] = '10.1.1.1/16'
cls.vpc = VPC.create(
cls.apiclient,
cls.services["vpc"],
vpcofferingid=cls.vpc_off.id,
zoneid=cls.zone.id,
account=cls.account.name,
domainid=cls.account.domainid
)
cls.nw_off = NetworkOffering.create(
cls.apiclient,
cls.services["network_offering"],
conservemode=False
)
# Enable Network offering
cls.nw_off.update(cls.apiclient, state='Enabled')
cls._cleanup.append(cls.nw_off)
# Creating network using the network offering created
cls.network_1 = Network.create(
cls.apiclient,
cls.services["network"],
accountid=cls.account.name,
domainid=cls.account.domainid,
networkofferingid=cls.nw_off.id,
zoneid=cls.zone.id,
gateway='10.1.1.1',
vpcid=cls.vpc.id
)
# Spawn an instance in that network
vm_1 = VirtualMachine.create(
cls.apiclient,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_1.id)]
)
vm_2 = VirtualMachine.create(
cls.apiclient,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_1.id)]
)
# Spawn an instance in that network
vm_3 = VirtualMachine.create(
cls.apiclient,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_1.id)]
)
vms = VirtualMachine.list(
cls.apiclient,
account=cls.account.name,
domainid=cls.account.domainid,
listall=True
)
public_ip_1 = PublicIPAddress.create(
cls.apiclient,
accountid=cls.account.name,
zoneid=cls.zone.id,
domainid=cls.account.domainid,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
nat_rule = NATRule.create(
cls.apiclient,
vm_1,
cls.services["natrule"],
ipaddressid=public_ip_1.ipaddress.id,
openfirewall=False,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
nwacl_nat = NetworkACL.create(
cls.apiclient,
networkid=cls.network_1.id,
services=cls.services["natrule"],
traffictype='Ingress'
)
public_ip_2 = PublicIPAddress.create(
cls.apiclient,
accountid=cls.account.name,
zoneid=cls.zone.id,
domainid=cls.account.domainid,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
try:
StaticNATRule.enable(
cls.apiclient,
ipaddressid=public_ip_2.ipaddress.id,
virtualmachineid=vm_2.id,
networkid=cls.network_1.id
)
except Exception as e:
cls.fail("Failed to enable static NAT on IP: %s - %s" % (
public_ip_2.ipaddress.ipaddress, e))
public_ips = PublicIPAddress.list(
cls.apiclient,
networkid=cls.network_1.id,
listall=True,
isstaticnat=True,
account=cls.account.name,
domainid=cls.account.domainid
)
public_ip_3 = PublicIPAddress.create(
cls.apiclient,
accountid=cls.account.name,
zoneid=cls.zone.id,
domainid=cls.account.domainid,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
lb_rule = LoadBalancerRule.create(
cls.apiclient,
cls.services["lbrule"],
ipaddressid=public_ip_3.ipaddress.id,
accountid=cls.account.name,
networkid=cls.network_1.id,
vpcid=cls.vpc.id,
domainid=cls.account.domainid
)
lb_rule.assign(cls.apiclient, [vm_3])
nwacl_lb = NetworkACL.create(
cls.apiclient,
networkid=cls.network_1.id,
services=cls.services["lbrule"],
traffictype='Ingress'
)
nwacl_internet_1 = NetworkACL.create(
cls.apiclient,
networkid=cls.network_1.id,
services=cls.services["http_rule"],
traffictype='Egress'
)
private_gateway = PrivateGateway.create(
cls.apiclient,
gateway='10.1.3.1',
ipaddress='10.1.3.100',
netmask='255.255.255.0',
vlan=678,
vpcid=cls.vpc.id
)
cls.gateways = PrivateGateway.list(
cls.apiclient,
id=private_gateway.id,
listall=True
)
static_route = StaticRoute.create(
cls.apiclient,
cidr='11.1.1.1/24',
gatewayid=private_gateway.id
)
cls.static_routes = StaticRoute.list(
cls.apiclient,
id=static_route.id,
listall=True
)
# Extend instead of overwriting so earlier cleanup entries are preserved
cls._cleanup.extend([
cls.service_offering,
cls.vpc_off
])
@classmethod
def tearDownClass(cls):
try:
#Cleanup resources used
cleanup_resources(cls.apiclient, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.account = Account.create(
self.apiclient,
self.services["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = [self.account]
return
def tearDown(self):
try:
#Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def validate_vpc_offering(self, vpc_offering):
"""Validates the VPC offering"""
self.debug("Check if the VPC offering is created successfully?")
vpc_offs = VpcOffering.list(
self.apiclient,
id=vpc_offering.id
)
self.assertEqual(
isinstance(vpc_offs, list),
True,
"List VPC offerings should return a valid list"
)
self.assertEqual(
vpc_offering.name,
vpc_offs[0].name,
"Name of the VPC offering should match with listVPCOff data"
)
self.debug(
"VPC offering is created successfully - %s" %
vpc_offering.name)
return
def validate_vpc_network(self, network, state=None):
"""Validates the VPC network"""
self.debug("Check if the VPC network is created successfully?")
vpc_networks = VPC.list(
self.apiclient,
id=network.id
)
self.assertEqual(
isinstance(vpc_networks, list),
True,
"List VPC network should return a valid list"
)
self.assertEqual(
network.name,
vpc_networks[0].name,
"Name of the VPC network should match with listVPC data"
)
if state:
self.assertEqual(
vpc_networks[0].state,
state,
"VPC state should be '%s'" % state
)
self.debug("VPC network validated - %s" % network.name)
return
def validate_network_rules(self):
""" Validate network rules
"""
vms = VirtualMachine.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
public_ips = PublicIPAddress.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
for vm, public_ip in zip(vms, public_ips):
try:
ssh_1 = vm.get_ssh_client(
ipaddress=public_ip.ipaddress.ipaddress)
self.debug("SSH into VM is successfully")
self.debug("Verifying if we can ping to outside world from VM?")
# Ping to outsite world
res = ssh_1.execute("ping -c 1 www.google.com")
# res = 64 bytes from maa03s17-in-f20.1e100.net (74.125.236.212):
# icmp_req=1 ttl=57 time=25.9 ms
# --- www.l.google.com ping statistics ---
# 1 packets transmitted, 1 received, 0% packet loss, time 0ms
# rtt min/avg/max/mdev = 25.970/25.970/25.970/0.000 ms
except Exception as e:
self.fail("Failed to SSH into VM - %s, %s" %
(public_ip.ipaddress.ipaddress, e))
result = str(res)
self.assertEqual(
result.count("1 received"),
1,
"Ping to outside world from VM should be successful"
)
def migrate_router(self, router):
""" Migrate the router """
self.debug("Checking if the host is available for migration?")
hosts = Host.list(self.apiclient, zoneid=self.zone.id, type='Routing')
self.assertEqual(
isinstance(hosts, list),
True,
"List hosts should return a valid list"
)
if len(hosts) < 2:
raise unittest.SkipTest(
"No host available for migration. Test requires atleast 2 hosts")
# Remove the host of current VM from the hosts list
hosts[:] = [host for host in hosts if host.id != router.hostid]
host = hosts[0]
self.debug("Validating if the network rules work properly or not?")
self.debug("Migrating VM-ID: %s from %s to Host: %s" % (
router.id,
router.hostid,
host.id
))
try:
#Migrate the router
cmd = migrateSystemVm.migrateSystemVmCmd()
cmd.isAsync = "false"
cmd.hostid = host.id
cmd.virtualmachineid = router.id
self.apiclient.migrateSystemVm(cmd)
except Exception as e:
self.fail("Failed to migrate instance, %s" % e)
self.debug("Waiting for Router mgiration ....")
time.sleep(240)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
router.hostid = router_response[0].hostid
self.assertEqual(router.hostid, host.id, "Migration to host %s failed. The router host is"
"still %s" % (host.id, router.hostid))
return
@attr(tags=["advanced", "intervlan"])
def test_01_start_stop_router_after_addition_of_one_guest_network(self):
""" Test start/stop of router after addition of one guest network
"""
# Validations
#1. Create a VPC with cidr - 10.1.1.1/16
#2. Add network1(10.1.1.1/24) to this VPC.
#3. Deploy vm1,vm2 and vm3 such that they are part of network1.
#4. Create a PF /Static Nat/LB rule for vms in network1.
#5. Create ingress network ACL for allowing all the above rules from a public ip range on network1.
#6. Create egress network ACL for network1 to access google.com.
#7. Create a private gateway for this VPC and add a static route to this gateway.
#8. Create a VPN gateway for this VPC and add a static route to this gateway.
#9. Make sure that all the PF,LB and Static NAT rules work as expected.
#10. Make sure that we are able to access google.com from all the user Vms.
#11. Make sure that the newly added private gateway's and VPN gateway's static routes work as expected
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
self.assertEqual(
isinstance(self.gateways, list),
True,
"List private gateways should return a valid response"
)
self.assertEqual(
isinstance(self.static_routes, list),
True,
"List static route should return a valid response"
)
# Stop the VPC Router
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
router = routers[0]
self.debug("Stopping the router with ID: %s" % router.id)
#Stop the router
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in stopped state
self.assertEqual(
router_response[0].state,
'Stopped',
"Check list router response for router state"
)
self.debug("Stopped the router with ID: %s" % router.id)
# Start The Router
self.debug("Starting the router with ID: %s" % router.id)
cmd = startRouter.startRouterCmd()
cmd.id = router.id
self.apiclient.startRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in running state
self.assertEqual(
router_response[0].state,
'Running',
"Check list router response for router state"
)
self.debug("Started the router with ID: %s" % router.id)
return
@attr(tags=["advanced", "intervlan"])
def test_02_reboot_router_after_addition_of_one_guest_network(self):
""" Test reboot of router after addition of one guest network
"""
# Validations
#1. Create a VPC with cidr - 10.1.1.1/16
#2. Add network1(10.1.1.1/24) to this VPC.
#3. Deploy vm1,vm2 and vm3 such that they are part of network1.
#4. Create a PF /Static Nat/LB rule for vms in network1.
#5. Create ingress network ACL for allowing all the above rules from a public ip range on network1.
#6. Create egress network ACL for network1 to access google.com.
#7. Create a private gateway for this VPC and add a static route to this gateway.
#8. Create a VPN gateway for this VPC and add a static route to this gateway.
#9. Make sure that all the PF,LB and Static NAT rules work as expected.
#10. Make sure that we are able to access google.com from all the user Vms.
#11. Make sure that the newly added private gateway's and VPN gateway's static routes work as expected
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
self.assertEqual(
isinstance(self.gateways, list),
True,
"List private gateways should return a valid response"
)
self.assertEqual(
isinstance(self.static_routes, list),
True,
"List static route should return a valid response"
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
router = routers[0]
self.debug("Rebooting the router ...")
#Reboot the router
cmd = rebootRouter.rebootRouterCmd()
cmd.id = router.id
self.apiclient.rebootRouter(cmd)
#List routers to check state of router
router_response = list_routers(
self.apiclient,
id=router.id
)
self.assertEqual(
isinstance(router_response, list),
True,
"Check list response returns a valid list"
)
#List router should have router in running state and same public IP
self.assertEqual(
router_response[0].state,
'Running',
"Check list router response for router state"
)
return
@attr(tags=["advanced", "intervlan"])
def test_03_migrate_router_after_addition_of_one_guest_network(self):
""" Test migrate of router after addition of one guest network
"""
# Validations
#1. Create a VPC with cidr - 10.1.1.1/16
#2. Add network1(10.1.1.1/24) to this VPC.
#3. Deploy vm1,vm2 and vm3 such that they are part of network1.
#4. Create a PF /Static Nat/LB rule for vms in network1.
#5. Create ingress network ACL for allowing all the above rules from a public ip range on network1.
#6. Create egress network ACL for network1 to access google.com.
#7. Create a private gateway for this VPC and add a static route to this gateway.
#8. Create a VPN gateway for this VPC and add a static route to this gateway.
#9. Make sure that all the PF,LB and Static NAT rules work as expected.
#10. Make sure that we are able to access google.com from all the user Vms.
#11. Make sure that the newly added private gateway's and VPN gateway's static routes work as expected
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
self.assertEqual(
isinstance(self.gateways, list),
True,
"List private gateways should return a valid response"
)
self.assertEqual(
isinstance(self.static_routes, list),
True,
"List static route should return a valid response"
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
self.migrate_router(routers[0])
return
@attr(tags=["advanced", "intervlan"])
def test_04_chg_srv_off_router_after_addition_of_one_guest_network(self):
""" Test to change service offering of router after addition of one guest network
"""
# Validations
#1. Create a VPC with cidr - 10.1.1.1/16
#2. Add network1(10.1.1.1/24) to this VPC.
#3. Deploy vm1,vm2 and vm3 such that they are part of network1.
#4. Create a PF /Static Nat/LB rule for vms in network1.
#5. Create ingress network ACL for allowing all the above rules from a public ip range on network1.
#6. Create egress network ACL for network1 to access google.com.
#7. Create a private gateway for this VPC and add a static route to this gateway.
#8. Create a VPN gateway for this VPC and add a static route to this gateway.
#9. Make sure that all the PF,LB and Static NAT rules work as expected.
#10. Make sure that we are able to access google.com from all the user Vms.
#11. Make sure that the newly added private gateway's and VPN gateway's static routes work as expected
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
self.assertEqual(
isinstance(self.gateways, list),
True,
"List private gateways should return a valid response"
)
self.assertEqual(
isinstance(self.static_routes, list),
True,
"List static route should return a valid response"
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
#Stop the router
router = routers[0]
self.debug("Stopping the router with ID: %s" % router.id)
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
service_offering = ServiceOffering.create(
self.apiclient,
self.services["service_offering_new"]
)
self.debug("Changing service offering for the Router %s" % router.id)
try:
router = Router.change_service_offering(self.apiclient,
router.id,
service_offering.id
)
except Exception as e:
self.fail("Changing service offering failed: %s" % e)
self.debug("Router %s" % router)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
router = routers[0]
self.assertEqual(
router.serviceofferingid,
service_offering.id,
"Changing service offering failed as id is %s and expected"
"is %s" % (router.serviceofferingid, service_offering.id)
)
return
@attr(tags=["advanced", "intervlan"])
def test_05_destroy_router_after_addition_of_one_guest_network(self):
""" Test destroy of router after addition of one guest network
"""
# Validations
#1. Create a VPC with cidr - 10.1.1.1/16
#2. Add network1(10.1.1.1/24) to this VPC.
#3. Deploy vm1,vm2 and vm3 such that they are part of network1.
#4. Create a PF /Static Nat/LB rule for vms in network1.
#5. Create ingress network ACL for allowing all the above rules from a public ip range on network1.
#6. Create egress network ACL for network1 to access google.com.
#7. Create a private gateway for this VPC and add a static route to this gateway.
#8. Create a VPN gateway for this VPC and add a static route to this gateway.
#9. Make sure that all the PF,LB and Static NAT rules work as expected.
#10. Make sure that we are able to access google.com from all the user Vms.
#11. Make sure that the newly added private gateway's and VPN gateway's static routes work as expected
self.validate_vpc_offering(self.vpc_off)
self.validate_vpc_network(self.vpc)
self.assertEqual(
isinstance(self.gateways, list),
True,
"List private gateways should return a valid response"
)
self.assertEqual(
isinstance(self.static_routes, list),
True,
"List static route should return a valid response"
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"List Routers should return a valid list"
)
Router.destroy(self.apiclient,
id=routers[0].id
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(routers, list),
False,
"List Routers should be empty"
)
return
| 43.934734
| 130
| 0.436165
| 5,003
| 58,565
| 5.037577
| 0.092744
| 0.031544
| 0.03571
| 0.018569
| 0.831885
| 0.81776
| 0.808158
| 0.795858
| 0.77955
| 0.767686
| 0
| 0.017226
| 0.494476
| 58,565
| 1,332
| 131
| 43.967718
| 0.834054
| 0.137232
| 0
| 0.716229
| 0
| 0
| 0.129335
| 0.004335
| 0
| 0
| 0
| 0
| 0.051506
| 1
| 0.025267
| false
| 0.001944
| 0.008746
| 0.000972
| 0.059281
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be1a64d1072edb02eda7bca8774b0fe000f88be8
| 5,202
|
py
|
Python
|
utils/mobjects/Object_Border.py
|
AStarySky/manim_sandbox
|
b1827c321337dbf0717f1806727e22528413f49a
|
[
"MIT"
] | 366
|
2020-03-06T13:50:13.000Z
|
2022-03-25T07:40:34.000Z
|
utils/mobjects/Object_Border.py
|
AStarySky/manim_sandbox
|
b1827c321337dbf0717f1806727e22528413f49a
|
[
"MIT"
] | 3
|
2020-03-16T00:52:04.000Z
|
2020-12-18T13:30:33.000Z
|
utils/mobjects/Object_Border.py
|
AStarySky/manim_sandbox
|
b1827c321337dbf0717f1806727e22528413f49a
|
[
"MIT"
] | 82
|
2020-03-15T01:45:02.000Z
|
2022-03-23T05:45:00.000Z
|
# from widcardw
#
#
# This Border implementation is rather rough...
# CtrlT updates automatically: just pass in a mobject and it can be used like a TrackedPath.
# It still does not feel great, though; the NoneUpdate version below plus a lambda become updater might be nicer.
# It may be of some help for homework 4.
# Also, no complaints about why I did not use a for loop -- brute force is all I know.
# Maybe a center_point will be added later if there is time.
from manimlib.imports import *
class CtrlT(VGroup):
CONFIG = {"buff": SMALL_BUFF, "black_bg": False,
"add_corner": True}
def __init__(self, obj, **kwargs):
VGroup.__init__(self, **kwargs)
if not self.black_bg:
border = Rectangle(width=4, height=2).add_updater(
lambda b: b.become(Rectangle(width=obj.get_width()+2*self.buff, height=obj.get_height()+2*self.buff,
stroke_width=1, stroke_color="#000000").move_to(obj.get_center())))
corner_group = VGroup()
if self.add_corner:
dot_0 = Dot().add_updater(lambda d: d.become(
Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[0])
))
dot_1 = Dot().add_updater(lambda d: d.become(
Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[1])
))
dot_2 = Dot().add_updater(lambda d: d.become(
Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[2])
))
dot_3 = Dot().add_updater(lambda d: d.become(
Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[3])
))
corner_group.add(dot_0, dot_1, dot_2, dot_3)
else:
border = Rectangle(width=obj.get_width()+2*self.buff, height=obj.get_height()+2*self.buff,
stroke_color="#ffffff", stroke_width=1).add_updater(
lambda b: b.move_to(obj.get_center())
)
corner_group = VGroup()
if self.add_corner:
dot_0 = Dot().add_updater(lambda d: d.become(
Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[0])
))
dot_1 = Dot().add_updater(lambda d: d.become(
Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[1])
))
dot_2 = Dot().add_updater(lambda d: d.become(
Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[2])
))
dot_3 = Dot().add_updater(lambda d: d.become(
Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[3])
))
corner_group.add(dot_0, dot_1, dot_2, dot_3)
self.add(border, corner_group)
class BorderNoneUpdate(VGroup):
CONFIG = {"buff": SMALL_BUFF, "black_bg": False,
"add_corner": True}
def __init__(self, obj, **kwargs):
VGroup.__init__(self, **kwargs)
if not self.black_bg:
border = Rectangle(width=obj.get_width()+2*self.buff, height=obj.get_height()+2*self.buff,
stroke_width=1, stroke_color="#000000").move_to(obj.get_center())
corner_group = VGroup()
if self.add_corner:
dot_0 = Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[0])
dot_1 = Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[1])
dot_2 = Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[2])
dot_3 = Dot(color="#ffffff", stroke_width=1,
stroke_color="#000000").move_to(border.get_vertices()[3])
corner_group.add(dot_0, dot_1, dot_2, dot_3)
else:
border = Rectangle(width=obj.get_width()+2*self.buff, height=obj.get_height()+2*self.buff,
stroke_width=1, stroke_color="#ffffff").move_to(obj.get_center())
corner_group = VGroup()
if self.add_corner:
dot_0 = Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[0])
dot_1 = Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[1])
dot_2 = Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[2])
dot_3 = Dot(color="#000000", stroke_width=1,
stroke_color="#ffffff").move_to(border.get_vertices()[3])
corner_group.add(dot_0, dot_1, dot_2, dot_3)
self.add(border, corner_group)
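# --- Usage sketch (illustrative addition, not part of the original file) ---
# A minimal example of how the two border classes above might be used in a
# scene. The scene name `BorderDemo` and the Square mobject are assumptions;
# Scene, Square and RIGHT come from the `manimlib.imports` wildcard import at
# the top of this file, and the same manimlib version is assumed.
class BorderDemo(Scene):
    def construct(self):
        square = Square()
        # Static border: computed once, does not follow the object afterwards.
        static_border = BorderNoneUpdate(square, black_bg=True)
        # Auto-updating border: its updaters keep it attached to the object.
        live_border = CtrlT(square, black_bg=True)
        self.add(square, static_border, live_border)
        # Move the square: live_border follows, static_border stays behind.
        self.play(square.shift, 2 * RIGHT)
        self.wait()
        # The header comment suggests BorderNoneUpdate plus a become-updater
        # as an alternative to CtrlT; roughly:
        #   tracked = BorderNoneUpdate(square, black_bg=True)
        #   tracked.add_updater(lambda m: m.become(BorderNoneUpdate(square, black_bg=True)))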
| 50.019231
| 116
| 0.535179
| 610
| 5,202
| 4.286885
| 0.106557
| 0.08413
| 0.091778
| 0.130784
| 0.907839
| 0.885277
| 0.885277
| 0.885277
| 0.882983
| 0.882983
| 0
| 0.053643
| 0.329873
| 5,202
| 103
| 117
| 50.504854
| 0.6965
| 0.037486
| 0
| 0.784091
| 0
| 0
| 0.059247
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.011364
| 0
| 0.079545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be22e09aca2707788c31299717ade3f8991c8d8c
| 8,018
|
py
|
Python
|
landlab/components/erosion_deposition/tests/test_general_erodep.py
|
sequence-dev/landlab
|
a84fbf67a46de08bf8b6758bb316bff3423e746c
|
[
"MIT"
] | 1
|
2019-06-01T07:39:49.000Z
|
2019-06-01T07:39:49.000Z
|
landlab/components/erosion_deposition/tests/test_general_erodep.py
|
sequence-dev/landlab
|
a84fbf67a46de08bf8b6758bb316bff3423e746c
|
[
"MIT"
] | null | null | null |
landlab/components/erosion_deposition/tests/test_general_erodep.py
|
sequence-dev/landlab
|
a84fbf67a46de08bf8b6758bb316bff3423e746c
|
[
"MIT"
] | null | null | null |
from landlab import RasterModelGrid, HexModelGrid
from landlab.components import ErosionDeposition, FlowAccumulator
import numpy as np
from numpy import testing
import pytest
def test_Ff_bad_vals():
"""
Test that instantiating ErosionDeposition with a F_f value > 1 throws a
ValueError.
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic__elevation')
mg['node']['topographic__elevation'] += mg.node_y / 100000 \
+ mg.node_x / 100000 \
+ np.random.rand(len(mg.node_y)) / 10000
mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
left_is_closed=True,
right_is_closed=True,
top_is_closed=True)
mg.set_watershed_boundary_condition_outlet_id(0,
mg['node']['topographic__elevation'],
-9999.)
# Create a D8 flow handler
fa = FlowAccumulator(mg, flow_director='D8',
depression_finder='DepressionFinderAndRouter')
# Instantiate the ErosionDeposition component...
with pytest.raises(ValueError):
ErosionDeposition(mg, K=0.01, F_f=2.0, phi=0.5, v_s=0.001, m_sp=0.5,
n_sp=1.0, sp_crit=0.0, solver='basic')
def test_phi_bad_vals():
"""
Test that instantiating ErosionDeposition with a phi value >= 1 throws a
ValueError.
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic__elevation')
mg['node']['topographic__elevation'] += mg.node_y / 100000 \
+ mg.node_x / 100000 \
+ np.random.rand(len(mg.node_y)) / 10000
mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
left_is_closed=True,
right_is_closed=True,
top_is_closed=True)
mg.set_watershed_boundary_condition_outlet_id(0,
mg['node']['topographic__elevation'],
-9999.)
# Create a D8 flow handler
fa = FlowAccumulator(mg, flow_director='D8',
depression_finder='DepressionFinderAndRouter')
# Instantiate the ErosionDeposition component...
with pytest.raises(ValueError):
ErosionDeposition(mg, K=0.01, F_f=0.0, phi=2.0,
v_s=0.001, m_sp=0.5, n_sp=1.0, sp_crit=0.0,
solver='basic')
def test_q_as_field():
"""
Test that passing in water discharge as a grid field results in self.q
holding correct values
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic__elevation')
q = mg.add_zeros('node', 'user_imposed_discharge')
q[:] += 1.0 #add 1.0 m3/yr of water
mg['node']['topographic__elevation'] += mg.node_y / 100000 \
+ mg.node_x / 100000 \
+ np.random.rand(len(mg.node_y)) / 10000
mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
left_is_closed=True,
right_is_closed=True,
top_is_closed=True)
mg.set_watershed_boundary_condition_outlet_id(0,
mg['node']['topographic__elevation'],
-9999.)
# Create a D8 flow handler
fa = FlowAccumulator(mg, flow_director='D8',
depression_finder='DepressionFinderAndRouter')
# Instantiate the ErosionDeposition component...
ed = ErosionDeposition(mg, K=0.01, F_f=0.0, phi=0.0, v_s=0.001, m_sp=0.5,
n_sp=1.0, sp_crit=0.0,
discharge_field='user_imposed_discharge',
solver='basic')
#ensure that ed.q is everywhere equal to 1.0 m3/yr.
testing.assert_array_equal(np.ones(mg.number_of_nodes),
ed.q,
err_msg='E/D discharge field test failed',
verbose=True)
def test_q_as_array():
"""
Test that passing in water discharge as an array results in self.q
holding correct values
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic__elevation')
q = np.zeros(mg.number_of_nodes)
q[:] += 1.0 #add 1.0 m3/yr of water
mg['node']['topographic__elevation'] += mg.node_y / 100000 \
+ mg.node_x / 100000 \
+ np.random.rand(len(mg.node_y)) / 10000
mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
left_is_closed=True,
right_is_closed=True,
top_is_closed=True)
mg.set_watershed_boundary_condition_outlet_id(0,
mg['node']['topographic__elevation'],
-9999.)
# Create a D8 flow handler
fa = FlowAccumulator(mg, flow_director='D8',
depression_finder='DepressionFinderAndRouter')
# Instantiate the ErosionDeposition component...
ed = ErosionDeposition(mg, K=0.01, F_f=0.0, phi=0.0, v_s=0.001, m_sp=0.5,
n_sp=1.0, sp_crit=0.0,
discharge_field=q,
solver='basic')
#ensure that ed.q is everywhere equal to 1.0 m3/yr.
testing.assert_array_equal(np.ones(mg.number_of_nodes),
ed.q,
err_msg='E/D discharge array test failed',
verbose=True)
def test_sediment__flux_already_created():
"""
Test that an existing sediment flux grid field is not changed by
instantiating ErosionDeposition.
"""
#set up a 5x5 grid with one open outlet node and low initial elevations.
nr = 5
nc = 5
mg = RasterModelGrid((nr, nc), 10.0)
mg.add_zeros('node', 'topographic__elevation')
qs = mg.add_zeros('node', 'sediment__flux')
qs[:] += 1.0 #add 1.0 m3/yr of flux
mg['node']['topographic__elevation'] += mg.node_y / 100000 \
+ mg.node_x / 100000 \
+ np.random.rand(len(mg.node_y)) / 10000
mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
left_is_closed=True,
right_is_closed=True,
top_is_closed=True)
mg.set_watershed_boundary_condition_outlet_id(0,
mg['node']['topographic__elevation'],
-9999.)
# Create a D8 flow handler
fa = FlowAccumulator(mg, flow_director='D8',
depression_finder='DepressionFinderAndRouter')
# Instantiate the ErosionDeposition component...
ed = ErosionDeposition(mg, K=0.01, F_f=0.0, phi=0.0, v_s=0.001, m_sp=0.5,
n_sp=1.0, sp_crit=0.0, solver='basic')
#ensure that 'sediment__flux' field is everywhere equal to 1.0 m3/yr.
testing.assert_array_equal(np.ones(mg.number_of_nodes),
ed.qs,
err_msg='E/D sediment flux field test failed',
verbose=True)
| 39.890547
| 87
| 0.541033
| 964
| 8,018
| 4.26971
| 0.144191
| 0.036443
| 0.058309
| 0.063168
| 0.88241
| 0.874879
| 0.861273
| 0.845238
| 0.817784
| 0.817784
| 0
| 0.052208
| 0.364555
| 8,018
| 200
| 88
| 40.09
| 0.755643
| 0.174857
| 0
| 0.816
| 0
| 0
| 0.109692
| 0.076769
| 0
| 0
| 0
| 0
| 0.024
| 1
| 0.04
| false
| 0
| 0.04
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be26629952c541950e5c829110212f8373a9f372
| 248
|
py
|
Python
|
sfc_cae/__init__.py
|
acse-jy220/FPC-CAE
|
6e2fa3ec31795e66abdfe8faa01ffe36cc1e6b7a
|
[
"Apache-2.0"
] | null | null | null |
sfc_cae/__init__.py
|
acse-jy220/FPC-CAE
|
6e2fa3ec31795e66abdfe8faa01ffe36cc1e6b7a
|
[
"Apache-2.0"
] | null | null | null |
sfc_cae/__init__.py
|
acse-jy220/FPC-CAE
|
6e2fa3ec31795e66abdfe8faa01ffe36cc1e6b7a
|
[
"Apache-2.0"
] | 2
|
2021-07-25T16:37:29.000Z
|
2021-07-27T21:10:22.000Z
|
"""A ready-to-use, self-adjusting space-filling-curve (variational) convolutional autoencoder"""
from .utils import *
from .sfc_cae import *
from .sfc_cae_md import *
from .sfc_cae_adaptive import *
from .structured import *
from .training import *
| 35.428571
| 96
| 0.774194
| 35
| 248
| 5.342857
| 0.6
| 0.26738
| 0.208556
| 0.256684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 248
| 7
| 97
| 35.428571
| 0.857798
| 0.362903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
076c934dc1dfa43557975cbd5365ab5e81dc2079
| 128
|
py
|
Python
|
c_elegans_wiring/sub_modules/graph/graph_builder/__init__.py
|
adrameshiu/c-elegans-wiring
|
5eff187bbec6991864f73f3f4652b98225eab8e8
|
[
"MIT"
] | 1
|
2021-06-10T21:46:35.000Z
|
2021-06-10T21:46:35.000Z
|
c_elegans_wiring/sub_modules/graph/graph_builder/__init__.py
|
adrameshiu/Celegans-search
|
5eff187bbec6991864f73f3f4652b98225eab8e8
|
[
"MIT"
] | null | null | null |
c_elegans_wiring/sub_modules/graph/graph_builder/__init__.py
|
adrameshiu/Celegans-search
|
5eff187bbec6991864f73f3f4652b98225eab8e8
|
[
"MIT"
] | null | null | null |
# allows other modules to import files from .graph, etc.
from .main_graph_builder import *
from .class_graph_builder import *
| 32
| 58
| 0.78125
| 19
| 128
| 5.052632
| 0.631579
| 0.25
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148438
| 128
| 3
| 59
| 42.666667
| 0.880734
| 0.421875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
076fbf423111356e6db094b8e8ea8821e18ddb8d
| 39,991
|
py
|
Python
|
assets/useragent/mozilla.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | 74
|
2018-07-31T05:04:26.000Z
|
2021-02-18T05:51:22.000Z
|
assets/useragent/mozilla.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | null | null | null |
assets/useragent/mozilla.py
|
ice-melt/python-lib
|
345e34fff7386d91acbb03a01fd4127c5dfed037
|
[
"MIT"
] | 39
|
2018-08-30T07:02:51.000Z
|
2021-03-22T11:47:01.000Z
|
mozilla = [
'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:2.0b4) Gecko/20100818',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9a3pre) Gecko/20070330',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.9.2a1pre) Gecko',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; pl; rv:1.9.2.3) Gecko/20100401 Lightningquail/3.6.3',
'Mozilla/5.0 (X11; ; Linux i686; rv:1.9.2.20) Gecko/20110805',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.2.13) Gecko/20101203 iPhone',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.13; ) Gecko/20101203',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1b3) Gecko/20090305',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.9.0.9) Gecko/2009040821',
'Mozilla/5.0 (X11; U; Linux i686; ru; rv:1.9.0.8) Gecko/2009032711',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.7) Gecko/2009032803',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.0.7) Gecko/2009021910 MEGAUPLOAD 1.0',
'Mozilla/5.0 (Windows; U; BeOS; en-US; rv:1.9.0.7) Gecko/2009021910',
'Mozilla/5.0 (X11; U; Linux i686; pl-PL; rv:1.9.0.6) Gecko/2009020911',
'Mozilla/5.0 (X11; U; Linux i686; en; rv:1.9.0.6) Gecko/20080528',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.6) Gecko/2009020409',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.3) Gecko/2008092814 (Debian-3.0.1-1)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.3) Gecko/2008092816',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.3) Gecko/2008090713',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.2) Gecko Fedora/1.9.0.2-1.fc9',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.14) Gecko/2009091010',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.10) Gecko/2009042523',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.1) Gecko/2008072610',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008072820 Ubuntu/8.04 (hardy) (Linux Mint)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.0.1) Gecko/2008070206',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-au; rv:1.9.0.1) Gecko/2008070206',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9) Gecko',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; cs; rv:1.9) Gecko/2008052906',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.8b2) Gecko/20050702',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8b) Gecko/20050217',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.8b) Gecko/20050217',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.8b) Gecko/20050217',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.8a6) Gecko/20050111',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.8a5) Gecko/20041122',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8a4) Gecko/20040927',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.8a4) Gecko/20040927',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8a3) Gecko/20040817',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8a1) Gecko/20040520',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.8a1) Gecko/20040520',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.8.1a2) Gecko/20060512',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.8) Gecko/20071022',
'Mozilla/5.001 (X11; U; Linux i686; rv:1.8.1.6; de-ch) Gecko/25250101 (ubuntu-feisty)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.4) Gecko/20070531',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.4) Gecko/20070508 (Debian-1.8.1.4-2ubuntu5)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.4) Gecko/20061201 Mozilla/5.0 (Linux Mint)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.3) Gecko/20061201 MEGAUPLOAD 1.0 (Ubuntu-feisty)',
'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8.1.3) Gecko/20070310',
'Mozilla/5.0 (X11; ; Linux i686; en-US; rv:1.8.1.3) Gecko',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.3) Gecko/20070321',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.3) Gecko/20070309 Mozilla/4.8 [en] (Windows NT 5.1; U)',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; nl-NL; rv:1.8.1.3) Gecko/20080722',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en; rv:1.8.1.2pre) Gecko/20070223',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.2) Gecko/20070208',
'Mozilla/5.0 (compatible; Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2) Gecko/20070219',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.18) Gecko/20081029',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.16) Gecko/20080702',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.8.1.15) Gecko/20080620 Mozilla/4.0',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.13) Gecko/20080313',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.8.1.12) Gecko/20080201',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.8.1.11) Gecko/20071127 Mozilla',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.11) Gecko/20071213',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.11) Gecko/20071206',
'Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.11) Gecko/20071127',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.1) Gecko/20061205 Mozilla/5.0 (Debian-2.0.0.1+dfsg-2)',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; pl; rv:1.8.1.1) Gecko/20061204',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.8.1.1) Gecko/20061204',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.8.1.1) Gecko/20061204',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.0.9) Gecko/20061206',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.0.6) Gecko/20060728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.0.5) Gecko/20060719 KHTML/3.5.5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.4) Gecko/20060912 pango-text',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.4) Gecko/20060508',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.14eol) Gecko/20070505 (Debian-1.8.0.15~pre080614d-0etch1)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.1) Gecko/20060126',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.8) Gecko/20051111',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.7b) Gecko/20040429',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7b) Gecko/20040421',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7b) Gecko/20040316',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7b) Gecko/20040421',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.9) Gecko/20050711',
'Mozilla/5.0 (X11; U; Linux x86_64; de-AT; rv:1.7.8) Gecko/20050513 Debian/1.7.8-1',
'Mozilla/5.0 (X11; U; Linux i686; rv:1.7.8) Gecko/20061113 Debian/1.7.8-1sarge8',
'Mozilla/5.0 (X11; U; Linux i686; rv:1.7.8) Gecko/20060904 Debian/1.7.8-1sarge7.2.2',
'Mozilla/5.0 (X11; U; Linux i686; rv:1.7.8) Gecko/20060628 Debian/1.7.8-1sarge7.1',
'Mozilla/5.0 (X11; U; Linux i686; rv:1.7.8) Gecko/20050927 Debian/1.7.8-1sarge3',
'Mozilla/5.0 (X11; U; Linux i686; rv:1.7.8) Gecko/20050831 Debian/1.7.8-1sarge2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050927 Debian/1.7.8-1sarge3',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050921',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050831 Debian/1.7.8-1sarge2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050610',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050513 Debian/1.7.8-1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050512 Red Hat/1.7.8-1.1.3.1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.8) Gecko/20050511 (No IDN)',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.8) Gecko/20050511',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.7) Gecko/20050421',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.7) Gecko/20050427 Red Hat/1.7.7-1.1.3.4',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.7) Gecko/20050420 Debian/1.7.7-2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.7) Gecko/20050415',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.7) Gecko/20050414',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.6) Gecko/20050328 Fedora/1.7.6-1.2.5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.6) Gecko/20050225',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.7.6) Gecko/20050319',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.5) Gecko/20041221',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.5) Gecko/20041221',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.5) Gecko/20041013',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; pl; rv:1.9.0.6) Gecko/2009011913 Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.5) Gecko/20041221',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.5) Gecko/20041217',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.5) Gecko/20041217',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.5) Gecko/20041217',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; cs-CZ; rv:1.7.5) Gecko/20041217',
'Mozilla/5.0 (Windows NT 5.1; U; pt-br; rv:1.7.5) Gecko/20041110',
'Mozilla/5.0 (Windows NT 5.1; U; es-es; rv:1.7.5) Gecko/20041110',
'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.7.5) Gecko/20041110',
'Mozilla/5.0 (Windows NT 5.1; U; de; rv:1.7.5) Gecko/20041110',
'Mozilla/5.0 (OS/2; U; Warp 4.5; de-DE; rv:1.7.5) Gecko/20050523',
'Mozilla/5.0 (Macintosh; Intel Mac OS X; U; nb; rv:1.7.5) Gecko/20041110',
'Mozilla/5.0 (X11; U; Linux i686; hu; rv:1.7.3) Gecko/20050130',
'Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.7.3) Gecko/20040913',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.3) Gecko/20041007 Debian/1.7.3-5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.3) Gecko/20040913',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; fr; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; de-AT; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; fr; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; es-ES; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (X11; U; Linux i686; fr; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040906',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040906',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040810 Debian/1.7.2-2',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; FreeBSD i386; ja-JP; rv:1.7.2) Gecko/20050330',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.2) Gecko/20040709',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; de-AT; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; ; Windows NT 5.1; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.13) Gecko/20060509',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.7.13) Gecko/20060901',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060717 Debian/1.7.13-0.2ubuntu1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060427 Debian/1.7.13-0ubuntu05.04',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060417',
'Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US; rv:1.7.13) Gecko/20060417',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.7.13) Gecko/20061230',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/4.0 (compatible; Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.13) Gecko/20060414; Windows NT 5.1)',
'Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.7.12) Gecko/20050929',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060607 Debian/1.7.12-1.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060216 Debian/1.7.12-1.1ubuntu2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060205 Debian/1.7.12-1.1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060202 Fedora/1.7.12-1.5.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051203',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051013 Debian/1.7.12-1ubuntu1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051010 Debian/1.7.12-0ubuntu2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051007 Debian/1.7.12-1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050926',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050923',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050921 Red Hat/1.7.12-1.1.3.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050921',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050920',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.12) Gecko/20060205 Debian/1.7.12-1.1',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.12) Gecko/20050923 Fedora/1.7.12-1.5.1',
'Mozilla/5.0 (X11; U; Linux i686; cs-CZ; rv:1.7.12) Gecko/20050929',
'Mozilla/5.0 (X11; U; Linux i686 (x86_64); fr; rv:1.7.12) Gecko/20051010 Debian/1.7.12-0ubuntu2',
'Mozilla/5.0 (X11; U; AIX 5.3; en-US; rv:1.7.12) Gecko/20051025',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.7.12) Gecko/20050915',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.11) Gecko/20050802',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.11) Gecko/20050729',
'Mozilla/5.0 (Windows; U; WinNT4.0; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.11) Gecko/20050728 (No IDN)',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Win95; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; fr-FR; rv:1.7.11) Gecko/20050727',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.10) Gecko/20050811 Fedora/1.7.10-1.2.1.legacy',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.10) Gecko/20050727',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.10) Gecko/20050722',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.10) Gecko/20050716',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.7.0.13) Gecko/20060901',
'Mozilla/5.0 (X11; U; SunOS sun4v; en-US; rv:1.7) Gecko/20060120',
'Mozilla/5.0 (X11; U; SunOS sun4u; fr-FR; rv:1.7) Gecko/20040621',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7) Gecko/20060629',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7) Gecko/20060120',
'Mozilla/5.0 (X11; U; SunOS sun4u; de-DE; rv:1.7) Gecko/20070606',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20060627',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20051122',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20051027',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20050502',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20041221',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7) Gecko/20040514',
'Mozilla/5.0 (X11; U; FreeBSD; i386; it-IT; rv:1.7) Gecko',
'Mozilla/5.0 (X11; U; FreeBSD; i386; en-US; rv:1.7) Gecko',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.7) Gecko/20040803 Firefox/0.9.3',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7) Gecko/20040514',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6a) Gecko/20031030',
'Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.6) Gecko/20040115',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040413 Debian/1.6-5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040114',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.6) Gecko/20040115',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.6) Gecko/20040114',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; hu; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Photon; U; QNX x86pc; en-US; rv:1.6) Gecko/20040429',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5b) Gecko/20030827',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5b) Gecko/20030827',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5a) Gecko/20030718',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5a) Gecko/20030718',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.5.1) Gecko/20031120',
'Mozilla/5.0 (X11; U; SunOS5.10 sun4u; ja-JP; rv:1.5) Gecko/20031022',
'Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5) Gecko/20030916',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5) Gecko/20030916',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Win98; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; WinNT4.0; it-IT; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4b) Gecko/20030427',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.4a) Gecko/20030318',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (X11; U; IRIX64 IP35; en-US; rv:1.4.3) Gecko/20040909',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4.2) Gecko/20040220',
'Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.4.1) Gecko/20031114',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4.1) Gecko/20040406',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4.1) Gecko/20031114',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4.1) Gecko/20031008',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.4) Gecko/20041224',
'Mozilla/5.0 (X11; U; Linux ppc; en-US; rv:1.4) Gecko/20030714 Debian/1.4-2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030908 Debian/1.4-4',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030828',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030827 Debian/1.4-3',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030821',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030818',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4) Gecko/20030723',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.4) Gecko/20030908 Debian/1.4-4',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.4) Gecko/20030812',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (X11; U; Linux i586; de-AT; rv:1.4) Gecko/20030908 Debian/1.4-4',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4) Gecko/20030529',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4) Gecko/20030624',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4) Gecko/20030612',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4) Gecko/20030529',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3b) Gecko/20030125',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3b) Gecko/20021213',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.3b) Gecko/20030204',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.3b) Gecko/20030210',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.3a) Gecko/20021212',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.3.1) Gecko/20030509',
'Mozilla/5.0 (X11; U; Linux i686; hu-HU; rv:1.3.1) Gecko',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3.1) Gecko/20030428',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; WinNT4.0; de-AT; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.3.1) Gecko/20030425',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.3) Gecko/20030318',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030523',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030413',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030401',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030327 Debian/1.3-4',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030326',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030320',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030314',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030313',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.3) Gecko/20030430 Debian/1.3-5',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.3) Gecko/20030327 Debian/1.3-4',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (X11; U; HP-UX 9000/785; en-US; rv:1.3) Gecko/20030321',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.3) Gecko/20030312',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Macintosh; U; PPC; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.2b) Gecko/20021016',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Macintosh; U; PPC; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (Macintosh; U; PPC; en-US; rv:1.2a) Gecko/20020910',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.2.1) Gecko/20030711',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.2.1) Gecko/20021217',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.2.1) Gecko/20021212',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.2.1) Gecko/20021205',
'Mozilla/5.0 (X11; U; Linux i686;en-US; rv:1.2.1) Gecko/20030225',
'Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.2.1) Gecko/20030225',
'Mozilla/5.0 (X11; U; Linux i686; es-AR; rv:1.2.1) Gecko/20021130',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030427',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030409 Debian/1.2.1-9woody2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030225',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030113',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20021213 Debian/1.2.1-2.bunk',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20021208 Debian/1.2.1-2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20021204',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20021203',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20021130',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.2.1) Gecko/20021226 Debian/1.2.1-9',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.2.1) Gecko/20021204',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.2.1) Gecko/20021130',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.2.1) Gecko/20021204',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2) Gecko/20021202',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.2) Gecko/20021203',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.2) Gecko/20050223',
'Mozilla/5.0 (X11; U; HP-UX 9000/785; en-US; rv:1.2) Gecko/20021203',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (Windows; U; WinNT4.0; de-AT; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en-US; rv:1.2) Gecko/20021126',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1b) Gecko/20020722',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.1b) Gecko/20020721',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.1b) Gecko/20020721',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1a) Gecko/20020610',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.1a) Gecko/20020611',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.1a) Gecko/20020611',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.1a) Gecko/20020611',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en-US; rv:1.1a) Gecko/20020610',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.1a) Gecko/20020611',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.1a) Gecko/20020611',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.1) Gecko/20020925',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.1) Gecko/20020909',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.1) Gecko/20020827',
'Mozilla/5.0 (X11; U; Linux ppc; en-US; rv:1.1) Gecko/20020927',
'Mozilla/5.0 (X11; U; Linux i686; it-IT; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1) Gecko/20020913 Debian/1.1-1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1) Gecko/20020829',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1) Gecko/20020828',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.1) Gecko/20020913 Debian/1.1-1',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i386; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.1) Gecko/20021223',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; WinNT4.0; de-AT; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0rc3) Gecko/20020529 Debian/1.0rc3-1',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0rc3) Gecko/20020523',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.0rc3) Gecko/20020523',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (X11; U; AIX 005A471A4C00; en-US; rv:1.0rc2) Gecko/20020514',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.0rc2) Gecko/20020510',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0rc1) Gecko/20020417',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.2) Gecko/20030716',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.0.2) Gecko/20030208',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.0.2) Gecko/20021216',
'Mozilla/5.0 (Macintosh; U; PPC; en-US; rv:1.0.2) Gecko/20021216',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20021203',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20021122 Debian/1.0.1-2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20021110',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20021003',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020919',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020918',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020912',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020903',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020830',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0.1) Gecko/20020815',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020903',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.1) Gecko/20020830',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0.1) Gecko/20020826',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.0.0) Gecko/20020611',
'Mozilla/5.0 (X11; U; Linux ppc; en-US; rv:1.0.0) Gecko/20020622 Debian/1.0.0-0.woody.1',
'Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.0.0) Gecko/20020623 Debian/1.0.0-0.woody.1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.0) Gecko/20021004',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.0) Gecko/20020623 Debian/1.0.0-0.woody.1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.0) Gecko/20020612',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.0) Gecko/20020605',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.0.0) Gecko/20020529',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.0.0) Gecko/20020615 Debian/1.0.0-3',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:1.0.0) Gecko/20020623 Debian/1.0.0-0.woody.1',
'Mozilla/5.0 (X11; U; HP-UX 9000/785; en-US; rv:1.0.0) Gecko/20020605',
'Mozilla/5.0 (Windows; U; WinNT4.0; fr-FR; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.0.0) Gecko/20020509',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.0.0) Gecko/20020530',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.9) Gecko/20020513',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.9) Gecko/20020423',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.9) Gecko/20020408',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.9) Gecko/20020313',
'Mozilla/5.0 (X11; U; Linux i586; en-US; rv:0.9.9) Gecko/20020513',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:0.9.9) Gecko/20020311',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.8) Gecko/20020204',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:0.9.8) Gecko/20020204',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:0.9.8) Gecko/20020204',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.8) Gecko/20020204',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:0.9.7) Gecko/20011221',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.6) Gecko/20011202',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:0.9.5) Gecko/20011011',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:0.9.5) Gecko/20011011',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.4) Gecko/20010923',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.3) Gecko/20010801',
'Mozilla/5.0 (Macintosh; U; PPC; en-US; rv:0.9.3) Gecko/20010802',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.2.1) Gecko/20010901',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.2.1) Gecko/20010901',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.2) Gecko/20010809',
'Mozilla/5.001 (Macintosh; N; PPC; ja) Gecko/25250101'
]
| 77.652427
| 146
| 0.62424
| 8,325
| 39,991
| 2.997958
| 0.04973
| 0.047039
| 0.185351
| 0.092836
| 0.898469
| 0.888573
| 0.878436
| 0.862088
| 0.844499
| 0.825066
| 0
| 0.257358
| 0.161486
| 39,991
| 515
| 147
| 77.652427
| 0.486923
| 0
| 0
| 0.054369
| 0
| 0.994175
| 0.897054
| 0.009577
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
07a1d5cf6580db8face1a1145edeab61b609b660
| 169
|
py
|
Python
|
basic/__init__.py
|
magland/pyms
|
9baddaacfa48094a33056e31ff0e80fe98970f9a
|
[
"Apache-2.0"
] | 1
|
2018-03-29T15:12:57.000Z
|
2018-03-29T15:12:57.000Z
|
basic/__init__.py
|
magland/pyms
|
9baddaacfa48094a33056e31ff0e80fe98970f9a
|
[
"Apache-2.0"
] | null | null | null |
basic/__init__.py
|
magland/pyms
|
9baddaacfa48094a33056e31ff0e80fe98970f9a
|
[
"Apache-2.0"
] | null | null | null |
from .p_compute_templates import *
from .p_extract_clips import *
from .p_extract_geom import *
from .p_extract_timeseries import *
from .p_normalize_channels import *
| 28.166667
| 35
| 0.816568
| 25
| 169
| 5.12
| 0.44
| 0.195313
| 0.34375
| 0.421875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12426
| 169
| 5
| 36
| 33.8
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
07e9a3736fed23399c2846cec65ed88298b0ebc5
| 131
|
py
|
Python
|
kaggler/feature_selection/__init__.py
|
ppstacy/Kaggler
|
4fedc30153a4a74343fdec91468b96372873ebac
|
[
"MIT"
] | 743
|
2015-02-11T15:47:17.000Z
|
2022-03-29T06:58:57.000Z
|
kaggler/feature_selection/__init__.py
|
Drimdave/Kaggler
|
2a8027b534d881619fab8f095cff8876c6b39c55
|
[
"MIT"
] | 40
|
2015-03-16T17:44:40.000Z
|
2022-03-06T05:40:11.000Z
|
kaggler/feature_selection/__init__.py
|
Drimdave/Kaggler
|
2a8027b534d881619fab8f095cff8876c6b39c55
|
[
"MIT"
] | 178
|
2015-02-11T20:43:26.000Z
|
2021-11-27T12:46:35.000Z
|
from .feature_selection import DropInactive
from .feature_selection import DropLowInfo
__all__ = ['DropInactive', 'DropLowInfo']
| 21.833333
| 43
| 0.816794
| 13
| 131
| 7.769231
| 0.538462
| 0.217822
| 0.39604
| 0.514851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10687
| 131
| 5
| 44
| 26.2
| 0.863248
| 0
| 0
| 0
| 0
| 0
| 0.175573
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed0057ed57aa10a9819c204029cd531ca94bacf3
| 8,881
|
py
|
Python
|
official/nlp/keras_nlp/layers/on_device_embedding_test.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 8
|
2021-12-30T06:07:14.000Z
|
2022-02-10T14:49:13.000Z
|
official/nlp/keras_nlp/layers/on_device_embedding_test.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 62
|
2021-06-09T00:47:27.000Z
|
2021-09-24T09:06:58.000Z
|
official/nlp/keras_nlp/layers/on_device_embedding_test.py
|
akshit-protonn/models
|
38c8c6fe4144c93d6aadd19981c2b90570c29eba
|
[
"Apache-2.0"
] | 2
|
2021-08-17T22:07:17.000Z
|
2021-12-25T12:25:47.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Keras-based one-hot embedding layer."""
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import
from official.nlp.keras_nlp.layers import on_device_embedding
# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It
# guarantees forward compatibility of this code for the V2 switchover.
@keras_parameterized.run_all_keras_modes
class OnDeviceEmbeddingTest(keras_parameterized.TestCase):
def test_layer_creation(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size, embedding_width=embedding_width)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# The output should be the same as the input, save that it has an extra
# embedding_width dimension on the end.
expected_output_shape = [None, sequence_length, embedding_width]
self.assertEqual(expected_output_shape, output_tensor.shape.as_list())
self.assertEqual(output_tensor.dtype, tf.float32)
def test_layer_creation_with_mixed_precision(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size, embedding_width=embedding_width,
dtype="mixed_float16")
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# The output should be the same as the input, save that it has an extra
# embedding_width dimension on the end.
expected_output_shape = [None, sequence_length, embedding_width]
self.assertEqual(expected_output_shape, output_tensor.shape.as_list())
self.assertEqual(output_tensor.dtype, tf.float16)
def test_layer_invocation(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size, embedding_width=embedding_width)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# Create a model from the test layer.
model = tf.keras.Model(input_tensor, output_tensor)
# Invoke the model on test data. We can't validate the output data itself
# (the NN is too complex) but this will rule out structural runtime errors.
batch_size = 3
input_data = np.random.randint(
vocab_size, size=(batch_size, sequence_length))
output = model.predict(input_data)
self.assertEqual(tf.float32, output.dtype)
def test_layer_invocation_with_mixed_precision(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size, embedding_width=embedding_width,
dtype="mixed_float16")
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# Create a model from the test layer.
model = tf.keras.Model(input_tensor, output_tensor)
# Invoke the model on test data. We can't validate the output data itself
# (the NN is too complex) but this will rule out structural runtime errors.
batch_size = 3
input_data = np.random.randint(
vocab_size, size=(batch_size, sequence_length))
output = model.predict(input_data)
self.assertEqual(tf.float16, output.dtype)
def test_one_hot_layer_creation(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size,
embedding_width=embedding_width,
use_one_hot=True)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# The output should be the same as the input, save that it has an extra
# embedding_width dimension on the end.
expected_output_shape = [None, sequence_length, embedding_width]
self.assertEqual(expected_output_shape, output_tensor.shape.as_list())
self.assertEqual(output_tensor.dtype, tf.float32)
def test_one_hot_layer_creation_with_mixed_precision(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size,
embedding_width=embedding_width,
dtype="mixed_float16",
use_one_hot=True)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# The output should be the same as the input, save that it has an extra
# embedding_width dimension on the end.
expected_output_shape = [None, sequence_length, embedding_width]
self.assertEqual(expected_output_shape, output_tensor.shape.as_list())
self.assertEqual(output_tensor.dtype, tf.float16)
def test_one_hot_layer_invocation(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size,
embedding_width=embedding_width,
use_one_hot=True)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# Create a model from the test layer.
model = tf.keras.Model(input_tensor, output_tensor)
# Invoke the model on test data. We can't validate the output data itself
# (the NN is too complex) but this will rule out structural runtime errors.
batch_size = 3
input_data = np.random.randint(
vocab_size, size=(batch_size, sequence_length))
output = model.predict(input_data)
self.assertEqual(tf.float32, output.dtype)
def test_one_hot_layer_invocation_with_mixed_precision(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size,
embedding_width=embedding_width,
dtype="mixed_float16",
use_one_hot=True)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# Create a model from the test layer.
model = tf.keras.Model(input_tensor, output_tensor)
# Invoke the model on test data. We can't validate the output data itself
# (the NN is too complex) but this will rule out structural runtime errors.
batch_size = 3
input_data = np.random.randint(
vocab_size, size=(batch_size, sequence_length))
output = model.predict(input_data)
self.assertEqual(tf.float16, output.dtype)
def test_use_scale_layer_invocation(self):
vocab_size = 31
embedding_width = 27
test_layer = on_device_embedding.OnDeviceEmbedding(
vocab_size=vocab_size, embedding_width=embedding_width,
scale_factor=embedding_width**0.5)
# Create a 2-dimensional input (the first dimension is implicit).
sequence_length = 23
input_tensor = tf.keras.Input(shape=(sequence_length), dtype=tf.int32)
output_tensor = test_layer(input_tensor)
# Create a model from the test layer.
model = tf.keras.Model(input_tensor, output_tensor)
# Invoke the model on test data. We can't validate the output data itself
# (the NN is too complex) but this will rule out structural runtime errors.
batch_size = 3
input_data = np.random.randint(
vocab_size, size=(batch_size, sequence_length))
output = model.predict(input_data)
self.assertEqual(tf.float32, output.dtype)
if __name__ == "__main__":
tf.test.main()
| 41.5
| 101
| 0.742146
| 1,249
| 8,881
| 5.048038
| 0.141713
| 0.079937
| 0.026963
| 0.021412
| 0.851229
| 0.851229
| 0.849326
| 0.847423
| 0.847423
| 0.847423
| 0
| 0.017351
| 0.182299
| 8,881
| 213
| 102
| 41.694836
| 0.850868
| 0.307623
| 0
| 0.866667
| 0
| 0
| 0.009843
| 0
| 0
| 0
| 0
| 0
| 0.096296
| 1
| 0.066667
| false
| 0
| 0.02963
| 0
| 0.103704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed2d041168bea96cee5b50310c56380377498662
| 570
|
py
|
Python
|
train_ricord1a_timm-regnetx_002_gaussian_blur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_ricord1a_timm-regnetx_002_gaussian_blur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
train_ricord1a_timm-regnetx_002_gaussian_blur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold0_gaussian_blur.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold1_gaussian_blur.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold2_gaussian_blur.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold3_gaussian_blur.yml",
"python main.py --configs configs/train_ricord1a_unetplusplus_timm-regnetx_002_fold4_gaussian_blur.yml",
]
for l in ls:
os.system(l)
| 51.818182
| 108
| 0.854386
| 85
| 570
| 5.317647
| 0.294118
| 0.110619
| 0.132743
| 0.210177
| 0.85177
| 0.85177
| 0.85177
| 0.85177
| 0.85177
| 0.85177
| 0
| 0.046382
| 0.054386
| 570
| 11
| 109
| 51.818182
| 0.792208
| 0
| 0
| 0
| 0
| 0
| 0.884413
| 0.665499
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ed324b3cc4c245e36abfe1a2c5c7bf412ab9bfd7
| 3,378
|
py
|
Python
|
python/testData/inspections/PyUnboundLocalVariableInspection/TooLargeToAnalyze.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PyUnboundLocalVariableInspection/TooLargeToAnalyze.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PyUnboundLocalVariableInspection/TooLargeToAnalyze.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
f1 = 10
print(f1)
if f1:
a1 = 1
a2 = 2
a3 = 3
a4 = 4
a5 = 5
a6 = 6
elif f1:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
a1 = 1
a2 = 2
a3 = 3
a4 = 4
a5 = 5
elif f1:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
a1 = 1
a2 = 2
a3 = 3
a4 = 4
elif f1:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
a1 = 1
a2 = 2
a3 = 3
elif f1:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
a1 = 1
a2 = 2
elif f1:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
a1 = 1
else:
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
print(f1)
| 11.373737
| 13
| 0.582297
| 589
| 3,378
| 3.339559
| 0.03056
| 0.946619
| 1.207931
| 1.843416
| 0.989832
| 0.989832
| 0.989832
| 0.989832
| 0.989832
| 0.989832
| 0
| 0.125396
| 0.251628
| 3,378
| 297
| 14
| 11.373737
| 0.65269
| 0
| 0
| 0.986441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.901695
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
ed35bc2f2f5bb81b718140ac6afd93e90cfb3a1f
| 110
|
py
|
Python
|
gym_snake/envs/__init__.py
|
Gregory-Eales/gym-snake
|
55c9b2e7d0400ba901eadc09355f6136bf1579cb
|
[
"MIT"
] | null | null | null |
gym_snake/envs/__init__.py
|
Gregory-Eales/gym-snake
|
55c9b2e7d0400ba901eadc09355f6136bf1579cb
|
[
"MIT"
] | null | null | null |
gym_snake/envs/__init__.py
|
Gregory-Eales/gym-snake
|
55c9b2e7d0400ba901eadc09355f6136bf1579cb
|
[
"MIT"
] | null | null | null |
from gym_snake.envs.snake_env import SnakeEnv
from gym_snake.envs.snake_extrahard_env import SnakeExtraHardEnv
| 55
| 64
| 0.9
| 17
| 110
| 5.529412
| 0.529412
| 0.148936
| 0.255319
| 0.340426
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063636
| 110
| 2
| 64
| 55
| 0.912621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed45942adb1bb0ea9919f4df313d66aa1fe170e7
| 5,328
|
py
|
Python
|
tests/command_test.py
|
Lissandre/stakkr
|
9948b3daa4e94cbce80376f24bae4f850c3bc4ca
|
[
"Apache-2.0"
] | null | null | null |
tests/command_test.py
|
Lissandre/stakkr
|
9948b3daa4e94cbce80376f24bae4f850c3bc4ca
|
[
"Apache-2.0"
] | null | null | null |
tests/command_test.py
|
Lissandre/stakkr
|
9948b3daa4e94cbce80376f24bae4f850c3bc4ca
|
[
"Apache-2.0"
] | null | null | null |
import io
import os
import re
import sys
import unittest
from contextlib import redirect_stdout
from stakkr.command import launch_cmd_displays_output
base_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, base_dir + '/../')
# https://docs.python.org/3/library/unittest.html#assert-methods
class CommandTest(unittest.TestCase):
cmd_ok = ['echo', 'coucou']
cmd_nook = ['cat', '/does/not/exist']
cmd_err = ['echoo']
def test_command_without_stdout_ok(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_ok, False, False)
res = f.getvalue()
self.assertEqual('.', res[:1])
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_ok, False, False)
res = f.getvalue()
self.assertEqual('', res)
def test_command_with_stdout_ok(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_ok, True, False)
res = f.getvalue()
self.assertEqual('coucou\n\n', res)
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_ok, True, False)
res = f.getvalue()
self.assertEqual('', res)
def test_command_with_stderr_no_stdout_ok(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_ok, False, True)
res = f.getvalue()
self.assertEqual('.', res[:1])
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_ok, False, True)
res = f.getvalue()
self.assertEqual('', res)
def test_command_exception(self):
with self.assertRaisesRegex(SystemError, r"Cannot run the command: \[.*Err.*2\]"):
launch_cmd_displays_output(self.cmd_err, True, True)
def test_command_without_stderr_and_stdout_err(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_nook, False, False)
res = f.getvalue()
self.assertEqual('\n', res)
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_nook, False, False)
res = f.getvalue()
self.assertEqual('', res)
def test_command_without_stderr_but_stdout_err(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_nook, True, False)
res = f.getvalue()
self.assertEqual('\n', res)
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_nook, True, False)
res = f.getvalue()
self.assertEqual('', res)
def test_command_with_stderr_no_stdout_err(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
launch_cmd_displays_output(self.cmd_nook, False, True)
res = f.getvalue()
expected = re.compile('.*No such file or directory.*', re.MULTILINE)
self.assertRegex(res, expected)
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_nook, False, True)
res = f.getvalue()
self.assertEqual('', res)
def test_command_with_stderr_no_stdout_err_loop(self):
# TODO make it work under windows
if os.name == 'nt':
return
f = io.StringIO()
with redirect_stdout(f):
cmd = ['cat', 'w', 'r', 'o', 'n', 'g', 'f', 'i', 'l', 'e']
launch_cmd_displays_output(cmd, False, True)
res = f.getvalue()
expected = re.compile(r'.*\.\.\. and more.*', re.MULTILINE)
self.assertRegex(res, expected)
try:
from contextlib import redirect_stderr
except Exception:
return
f = io.StringIO()
with redirect_stderr(f):
launch_cmd_displays_output(self.cmd_nook, False, True)
res = f.getvalue()
self.assertEqual('', res)
if __name__ == "__main__":
unittest.main()
| 28.956522
| 90
| 0.58765
| 639
| 5,328
| 4.677621
| 0.156495
| 0.048177
| 0.091
| 0.123118
| 0.822683
| 0.809301
| 0.799264
| 0.799264
| 0.780863
| 0.780863
| 0
| 0.001356
| 0.308183
| 5,328
| 183
| 91
| 29.114754
| 0.80955
| 0.053491
| 0
| 0.785714
| 0
| 0
| 0.033976
| 0
| 0
| 0
| 0
| 0.005464
| 0.107143
| 1
| 0.057143
| false
| 0
| 0.1
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed6fb6df75d06313f4d24356855e82c8f3e7c651
| 124
|
py
|
Python
|
pybasicbayes/util/__init__.py
|
dhb2128/pybasicbayes
|
61f65ad6c781288605ec5f7347efcc5dbd73c4fc
|
[
"MIT"
] | 122
|
2015-02-11T11:54:47.000Z
|
2021-10-31T16:45:55.000Z
|
pybasicbayes/util/__init__.py
|
dhb2128/pybasicbayes
|
61f65ad6c781288605ec5f7347efcc5dbd73c4fc
|
[
"MIT"
] | 30
|
2015-02-27T00:22:14.000Z
|
2022-01-06T17:46:26.000Z
|
pybasicbayes/util/__init__.py
|
dhb2128/pybasicbayes
|
61f65ad6c781288605ec5f7347efcc5dbd73c4fc
|
[
"MIT"
] | 51
|
2015-02-16T21:24:32.000Z
|
2021-11-02T15:38:16.000Z
|
from __future__ import absolute_import
__all__ = ['general','plot','stats','text']
from . import general, plot, stats, text
| 31
| 43
| 0.741935
| 16
| 124
| 5.1875
| 0.5625
| 0.26506
| 0.385542
| 0.481928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 124
| 3
| 44
| 41.333333
| 0.754545
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ed786e6112b3b072bad417e1e2a4f9519202c222
| 18,787
|
py
|
Python
|
gnosis/safe/tests/test_safe_create2_tx.py
|
gosuto-ai/gnosis-py
|
637ce5c92f208f0e1d4f5e0687c10f378cfdff56
|
[
"MIT"
] | 1
|
2021-10-21T06:43:52.000Z
|
2021-10-21T06:43:52.000Z
|
gnosis/safe/tests/test_safe_create2_tx.py
|
HonzaDajc/gnosis-py
|
a2ab705f98479c4e46ca2b225a4bc75a773d69a7
|
[
"MIT"
] | null | null | null |
gnosis/safe/tests/test_safe_create2_tx.py
|
HonzaDajc/gnosis-py
|
a2ab705f98479c4e46ca2b225a4bc75a773d69a7
|
[
"MIT"
] | 2
|
2021-07-14T10:02:16.000Z
|
2021-08-02T22:04:41.000Z
|
import logging
from django.test import TestCase
from eth_account import Account
from gnosis.eth.contracts import get_safe_contract, get_safe_V1_0_0_contract
from ..safe_create2_tx import SafeCreate2TxBuilder
from .safe_test_case import SafeTestCaseMixin
from .utils import generate_salt_nonce
logger = logging.getLogger(__name__)
LOG_TITLE_WIDTH = 100
class TestSafeCreationTx(SafeTestCaseMixin, TestCase):
def test_safe_create2_tx_builder(self):
w3 = self.w3
salt_nonce = generate_salt_nonce()
funder_account = self.ethereum_test_account
owners = [Account.create().address for _ in range(4)]
threshold = len(owners) - 1
gas_price = self.gas_price
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=self.safe_contract_address,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price)
self.assertEqual(safe_creation_tx.payment, safe_creation_tx.payment_ether)
self.send_tx({
'to': safe_creation_tx.safe_address,
'value': safe_creation_tx.payment,
}, funder_account)
funder_balance = self.ethereum_client.get_balance(funder_account.address)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
self.safe_contract_address,
safe_creation_tx.safe_setup_data,
salt_nonce,
safe_creation_tx.gas,
safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
# Funder balance must be bigger after a Safe deployment, as Safe deployment is a little overpriced
self.assertGreater(self.ethereum_client.get_balance(funder_account.address), funder_balance)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
deployed_safe_proxy_contract = get_safe_contract(w3, proxy_address)
self.assertEqual(deployed_safe_proxy_contract.functions.getThreshold().call(), threshold)
self.assertEqual(deployed_safe_proxy_contract.functions.getOwners().call(), owners)
self.assertEqual(self.ethereum_client.get_balance(proxy_address), 0)
def test_safe_create2_tx_builder_v_1_0_0(self):
w3 = self.w3
tx_hash = get_safe_V1_0_0_contract(self.w3).constructor().transact({
'from': self.ethereum_test_account.address})
tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
master_copy = tx_receipt['contractAddress']
salt_nonce = generate_salt_nonce()
funder_account = self.ethereum_test_account
owners = [Account.create().address for _ in range(4)]
threshold = len(owners) - 1
gas_price = self.gas_price
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=master_copy,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price)
self.assertEqual(safe_creation_tx.payment, safe_creation_tx.payment_ether)
self.send_tx({
'to': safe_creation_tx.safe_address,
'value': safe_creation_tx.payment,
}, funder_account)
funder_balance = self.ethereum_client.get_balance(funder_account.address)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
master_copy,
safe_creation_tx.safe_setup_data,
salt_nonce,
safe_creation_tx.gas,
safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
# Funder balance must be bigger after a Safe deployment, as Safe deployment is a little overpriced
self.assertGreater(self.ethereum_client.get_balance(funder_account.address), funder_balance)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
deployed_safe_proxy_contract = get_safe_contract(w3, proxy_address)
self.assertEqual(deployed_safe_proxy_contract.functions.VERSION().call(), '1.0.0')
self.assertEqual(deployed_safe_proxy_contract.functions.getThreshold().call(), threshold)
self.assertEqual(deployed_safe_proxy_contract.functions.getOwners().call(), owners)
self.assertEqual(self.ethereum_client.get_balance(proxy_address), 0)
def test_safe_create2_tx_builder_with_payment_receiver(self):
w3 = self.w3
salt_nonce = generate_salt_nonce()
payment_receiver = Account.create().address
funder_account = self.ethereum_test_account
owners = [Account.create().address for _ in range(4)]
threshold = len(owners) - 1
gas_price = self.gas_price
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=self.safe_contract_address,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price,
payment_receiver=payment_receiver)
self.assertEqual(safe_creation_tx.payment, safe_creation_tx.payment_ether)
self.send_tx({
'to': safe_creation_tx.safe_address,
'value': safe_creation_tx.payment,
}, funder_account)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
self.safe_contract_address,
safe_creation_tx.safe_setup_data,
salt_nonce,
gas=safe_creation_tx.gas,
gas_price=safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
deployed_safe_proxy_contract = get_safe_contract(w3, proxy_address)
self.assertEqual(deployed_safe_proxy_contract.functions.getThreshold().call(), threshold)
self.assertEqual(deployed_safe_proxy_contract.functions.getOwners().call(), owners)
self.assertEqual(self.ethereum_client.get_balance(proxy_address), 0)
self.assertEqual(self.ethereum_client.get_balance(payment_receiver), safe_creation_tx.payment)
def test_safe_create2_tx_builder_with_fixed_cost(self):
w3 = self.w3
salt_nonce = generate_salt_nonce()
funder_account = self.ethereum_test_account
owners = [Account.create().address for _ in range(4)]
threshold = len(owners) - 1
gas_price = self.gas_price
fixed_creation_cost = 123 # Wei
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=self.safe_contract_address,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price,
fixed_creation_cost=fixed_creation_cost)
self.assertEqual(safe_creation_tx.payment, fixed_creation_cost)
self.assertEqual(safe_creation_tx.payment_ether, safe_creation_tx.gas * safe_creation_tx.gas_price)
self.send_tx({
'to': safe_creation_tx.safe_address,
'value': safe_creation_tx.payment,
}, funder_account)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
self.safe_contract_address,
safe_creation_tx.safe_setup_data,
salt_nonce,
gas=safe_creation_tx.gas,
gas_price=safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
deployed_safe_proxy_contract = get_safe_contract(w3, proxy_address)
self.assertEqual(deployed_safe_proxy_contract.functions.getThreshold().call(), threshold)
self.assertEqual(deployed_safe_proxy_contract.functions.getOwners().call(), owners)
self.assertEqual(self.ethereum_client.get_balance(proxy_address), 0)
def test_safe_create2_tx_builder_with_token_payment(self):
w3 = self.w3
salt_nonce = generate_salt_nonce()
erc20_deployer = Account.create()
funder_account = self.ethereum_test_account
owners = [Account.create().address for _ in range(4)]
threshold = len(owners) - 1
gas_price = self.gas_price
token_amount = int(1e18)
erc20_contract = self.deploy_example_erc20(token_amount, erc20_deployer.address)
self.assertEqual(erc20_contract.functions.balanceOf(erc20_deployer.address).call(), token_amount)
# Send something to the erc20 deployer
self.send_tx({
'to': erc20_deployer.address,
'value': w3.toWei(1, 'ether')
}, funder_account)
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=self.safe_contract_address,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price,
payment_token=erc20_contract.address)
self.assertEqual(safe_creation_tx.payment_token, erc20_contract.address)
self.assertGreater(safe_creation_tx.payment, 0)
self.assertEqual(safe_creation_tx.payment_ether, safe_creation_tx.gas * safe_creation_tx.gas_price)
self.send_tx(erc20_contract.functions.transfer(safe_creation_tx.safe_address,
safe_creation_tx.payment).buildTransaction({'from': erc20_deployer.address}),
erc20_deployer)
self.assertEqual(erc20_contract.functions.balanceOf(safe_creation_tx.safe_address).call(),
safe_creation_tx.payment)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
self.safe_contract_address,
safe_creation_tx.safe_setup_data,
salt_nonce,
gas=safe_creation_tx.gas,
gas_price=safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
deployed_safe_proxy_contract = get_safe_contract(w3, proxy_address)
self.assertEqual(deployed_safe_proxy_contract.functions.getThreshold().call(), threshold)
self.assertEqual(deployed_safe_proxy_contract.functions.getOwners().call(), owners)
self.assertEqual(self.ethereum_client.get_balance(proxy_address), 0)
def test_safe_gas_with_multiple_owners(self):
logger.info("Test Safe Proxy create2 gas with multiple owners".center(LOG_TITLE_WIDTH, '-'))
w3 = self.w3
funder_account = self.ethereum_test_account
number_of_accounts = 10
for i in range(2, number_of_accounts):
salt_nonce = generate_salt_nonce()
owners = [Account.create().address for _ in range(i + 1)]
threshold = len(owners) - 1
gas_price = self.gas_price
safe_creation_tx = SafeCreate2TxBuilder(w3=w3,
master_copy_address=self.safe_contract_address,
proxy_factory_address=self.proxy_factory_contract_address
).build(owners=owners,
threshold=threshold,
salt_nonce=salt_nonce,
gas_price=gas_price)
self.send_tx({
'to': safe_creation_tx.safe_address,
'value': safe_creation_tx.payment,
}, funder_account)
ethereum_tx_sent = self.proxy_factory.deploy_proxy_contract_with_nonce(funder_account,
self.safe_contract_address,
safe_creation_tx.safe_setup_data,
salt_nonce,
gas=safe_creation_tx.gas,
gas_price=safe_creation_tx.gas_price)
tx_receipt = w3.eth.wait_for_transaction_receipt(ethereum_tx_sent.tx_hash)
self.assertEqual(tx_receipt.status, 1)
logs = self.proxy_factory_contract.events.ProxyCreation().processReceipt(tx_receipt)
log = logs[0]
self.assertIsNone(tx_receipt.contractAddress)
self.assertEqual(log['event'], 'ProxyCreation')
proxy_address = log['args']['proxy']
self.assertEqual(proxy_address, safe_creation_tx.safe_address)
self.assertEqual(ethereum_tx_sent.contract_address, safe_creation_tx.safe_address)
logger.info("Number of owners: %d - Gas estimated %d - Gas Used %d - Difference %d - Gas used per owner %d",
len(owners),
safe_creation_tx.gas,
tx_receipt.gasUsed,
safe_creation_tx.gas - tx_receipt.gasUsed,
tx_receipt.gasUsed // len(owners))
| 58.52648 | 132 | 0.561612 | 1,809 | 18,787 | 5.443339 | 0.075732 | 0.082868 | 0.096679 | 0.045699 | 0.871331 | 0.86016 | 0.831421 | 0.813547 | 0.809587 | 0.79476 | 0 | 0.011443 | 0.372013 | 18,787 | 320 | 133 | 58.709375 | 0.82319 | 0.012455 | 0 | 0.806691 | 0 | 0.003717 | 0.020433 | 0 | 0 | 0 | 0 | 0 | 0.219331 | 1 | 0.022305 | false | 0 | 0.026022 | 0 | 0.052045 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92bc92121d7877f23c1312e7f7d9da993158000b | 88,012 | py | Python | sfof/python/euclid/dm/_dtd.py | sfarrens/sfof | f887abc0dbd1587fd7fbc7148b4704d1b5f4cdac | ["MIT"] | 13 | 2017-06-15T16:56:29.000Z | 2021-12-08T20:44:39.000Z | sfof/python/euclid/dm/_dtd.py | umikanero/sfof | 9aa7b09ccb12311a68373e4e516dee82fa5c428e | ["MIT"] | 6 | 2020-05-30T07:40:59.000Z | 2020-11-30T12:25:14.000Z | sfof/python/euclid/dm/_dtd.py | umikanero/sfof | 9aa7b09ccb12311a68373e4e516dee82fa5c428e | ["MIT"] | 4 | 2018-02-24T02:12:24.000Z | 2021-06-03T07:22:15.000Z |
# /home/sartor/pymodule/euclid/dm/_dtd.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:b271ffc25c73f887dbff4e054cfd41e733c35b7f
# Generated 2014-07-24 16:26:39.931170 by PyXB version 1.2.3
# Namespace http://euclid.esa.org/schema/bas/dtd [xmlns:dtd]
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:869ae486-133e-11e4-88d8-90b11c83965f')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.3'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import euclid.dm._utd as _ImportedBinding_euclid_dm__utd
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI(u'http://euclid.esa.org/schema/bas/dtd', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, unicode):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
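# Illustrative usage (not part of the generated bindings): given XML text that
# conforms to this schema, a binding instance can be built with
# CreateFromDocument, e.g.
#
#   with open('example_dtd_instance.xml', 'rb') as handle:  # hypothetical file name
#       instance = CreateFromDocument(handle.read())
#
# The returned object exposes the element and attribute properties declared by
# the binding classes defined below.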
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfDouble
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfDouble (pyxb.binding.basis.STD_list):
"""An unbounded list of doubles (space separated). Used for tabulated data."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfDouble')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 15, 1)
_Documentation = u'An unbounded list of doubles (space separated). Used for tabulated data.'
_ItemType = pyxb.binding.datatypes.double
listOfDouble._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfDouble', listOfDouble)
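# Illustrative note (not part of the generated bindings): in an instance
# document a listOfDouble value is written as whitespace-separated doubles,
# e.g. <SomeElement>0.0 1.5 3.0</SomeElement> (element name hypothetical);
# PyXB maps it onto a Python list whose items are double values.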
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfFloat
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfFloat (pyxb.binding.basis.STD_list):
"""An unbounded list of float. Used for tabulated data."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfFloat')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 45, 1)
_Documentation = u'An unbounded list of float. Used for tabulated data.'
_ItemType = pyxb.binding.datatypes.float
listOfFloat._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfFloat', listOfFloat)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfInteger1
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfInteger1 (pyxb.binding.basis.STD_list):
"""An unbounded list of one Byte Integer."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfInteger1')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 51, 1)
_Documentation = u'An unbounded list of one Byte Integer.'
_ItemType = pyxb.binding.datatypes.byte
listOfInteger1._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfInteger1', listOfInteger1)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfInteger2
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfInteger2 (pyxb.binding.basis.STD_list):
"""An unbounded list of two Bytes (short) Integer."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfInteger2')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 57, 1)
_Documentation = u'An unbounded list of two Bytes (short) Integer.'
_ItemType = pyxb.binding.datatypes.short
listOfInteger2._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfInteger2', listOfInteger2)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfInteger4
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfInteger4 (pyxb.binding.basis.STD_list):
"""An unbounded list of 4 Bytes (int) Integer."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfInteger4')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 63, 1)
_Documentation = u'An unbounded list of 4 Bytes (int) Integer.'
_ItemType = pyxb.binding.datatypes.int
listOfInteger4._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfInteger4', listOfInteger4)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOfInteger8
# superclasses pyxb.binding.datatypes.anySimpleType
class listOfInteger8 (pyxb.binding.basis.STD_list):
"""An unbounded list of 8 Bytes (int) Integer."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOfInteger8')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 69, 1)
_Documentation = u'An unbounded list of 8 Bytes (int) Integer.'
_ItemType = pyxb.binding.datatypes.long
listOfInteger8._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'listOfInteger8', listOfInteger8)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}hexaString
class hexaString (pyxb.binding.datatypes.string):
"""An hexadecimal number."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'hexaString')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 75, 1)
_Documentation = u'An hexadecimal number.'
hexaString._CF_pattern = pyxb.binding.facets.CF_pattern()
hexaString._CF_pattern.addPattern(pattern=u'[0-9,A-F,a-f]*')
hexaString._InitializeFacetMap(hexaString._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'hexaString', hexaString)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}var64String
class var64String (pyxb.binding.datatypes.string):
"""Character string : exact length 64"""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'var64String')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 83, 1)
_Documentation = u'Character string : exact length 64'
var64String._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(64L))
var64String._InitializeFacetMap(var64String._CF_length)
Namespace.addCategoryObject('typeBinding', u'var64String', var64String)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}positiveDouble
class positiveDouble (pyxb.binding.datatypes.double):
"""Double between 0 (inclusive) and infinity"""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'positiveDouble')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 91, 1)
_Documentation = u'Double between 0 (inclusive) and infinity'
positiveDouble._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=positiveDouble, value=pyxb.binding.datatypes.double(0.0))
positiveDouble._InitializeFacetMap(positiveDouble._CF_minInclusive)
Namespace.addCategoryObject('typeBinding', u'positiveDouble', positiveDouble)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}negativeDouble
class negativeDouble (pyxb.binding.datatypes.double):
"""Double between infinity and O (inclusive)"""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'negativeDouble')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 99, 1)
_Documentation = u'Double between -infinity and 0 (inclusive)'
negativeDouble._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=negativeDouble, value=pyxb.binding.datatypes.double(0.0))
negativeDouble._InitializeFacetMap(negativeDouble._CF_maxInclusive)
Namespace.addCategoryObject('typeBinding', u'negativeDouble', negativeDouble)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}percentValue
class percentValue (pyxb.binding.datatypes.double):
"""Double between 0 and 100 inclusive"""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'percentValue')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 107, 1)
_Documentation = u'Double between 0 and 100 inclusive'
percentValue._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=percentValue, value=pyxb.binding.datatypes.double(100.0))
percentValue._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=percentValue, value=pyxb.binding.datatypes.double(0.0))
percentValue._InitializeFacetMap(percentValue._CF_maxInclusive,
percentValue._CF_minInclusive)
Namespace.addCategoryObject('typeBinding', u'percentValue', percentValue)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}degAngle
class degAngle (pyxb.binding.datatypes.double):
"""Angle in degrees between -180 and 360 """
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'degAngle')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 139, 1)
_Documentation = u'Angle in degrees between -180 and 360\t\t'
degAngle._CF_maxExclusive = pyxb.binding.facets.CF_maxExclusive(value_datatype=pyxb.binding.datatypes.double, value=pyxb.binding.datatypes.anySimpleType(u'360.0'))
degAngle._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=degAngle, value=pyxb.binding.datatypes.double(-180.0))
degAngle._InitializeFacetMap(degAngle._CF_maxExclusive,
degAngle._CF_minInclusive)
Namespace.addCategoryObject('typeBinding', u'degAngle', degAngle)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}nameRestriction
class nameRestriction (pyxb.binding.datatypes.string):
"""Basic naming convention: length between 4 and 100 characters, white spaces collapsed"""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'nameRestriction')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 148, 1)
_Documentation = u'Basic naming convention: length between 4 and 100 characters, white spaces collapsed'
nameRestriction._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(4L))
nameRestriction._CF_maxLength = pyxb.binding.facets.CF_maxLength(value=pyxb.binding.datatypes.nonNegativeInteger(100L))
nameRestriction._InitializeFacetMap(nameRestriction._CF_minLength,
nameRestriction._CF_maxLength)
Namespace.addCategoryObject('typeBinding', u'nameRestriction', nameRestriction)
# Atomic simple type: {http://euclid.esa.org/schema/bas/dtd}curveShape
class curveShape (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""The position of the points connecting the vertices (or end points) of a polygon in space (so on a sphere) may be a large circle (or geodesic) or a line (iso coordinate on an axis)."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'curveShape')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 260, 1)
_Documentation = u'The position of the points connecting the vertices (or end points) of a polygon in space (so on a sphere) may be a great circle (or geodesic) or a line (iso coordinate on an axis).'
curveShape._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=curveShape, enum_prefix=None)
curveShape.line = curveShape._CF_enumeration.addEnumeration(unicode_value=u'line', tag=u'line')
curveShape.great_circle = curveShape._CF_enumeration.addEnumeration(unicode_value=u'great circle', tag=u'great_circle')
curveShape._InitializeFacetMap(curveShape._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'curveShape', curveShape)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOf2Double
# superclasses listOfDouble
class listOf2Double (pyxb.binding.basis.STD_list):
"""Space separated list of 2 double values, used for array2D."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOf2Double')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 21, 1)
_Documentation = u'Space separated list of 2 double values, used for array2D.'
_ItemType = pyxb.binding.datatypes.double
listOf2Double._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(2L))
listOf2Double._InitializeFacetMap(listOf2Double._CF_length)
Namespace.addCategoryObject('typeBinding', u'listOf2Double', listOf2Double)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOf3Double
# superclasses listOfDouble
class listOf3Double (pyxb.binding.basis.STD_list):
"""Space separated list of 3 double values, used for matrix or Array3D."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOf3Double')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 29, 1)
_Documentation = u'Space separated list of 3 double values, used for matrix or Array3D.'
_ItemType = pyxb.binding.datatypes.double
listOf3Double._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(3L))
listOf3Double._InitializeFacetMap(listOf3Double._CF_length)
Namespace.addCategoryObject('typeBinding', u'listOf3Double', listOf3Double)
# List simple type: {http://euclid.esa.org/schema/bas/dtd}listOf6Double
# superclasses listOfDouble
class listOf6Double (pyxb.binding.basis.STD_list):
"""Space separated list of 6 double values, used for matrix."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'listOf6Double')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 37, 1)
_Documentation = u'Space separated list of 6 double values, used for matrix.'
_ItemType = pyxb.binding.datatypes.double
listOf6Double._CF_length = pyxb.binding.facets.CF_length(value=pyxb.binding.datatypes.nonNegativeInteger(6L))
listOf6Double._InitializeFacetMap(listOf6Double._CF_length)
Namespace.addCategoryObject('typeBinding', u'listOf6Double', listOf6Double)
# Complex type {http://euclid.esa.org/schema/bas/dtd}matrixDouble3x3 with content type ELEMENT_ONLY
class matrixDouble3x3 (pyxb.binding.basis.complexTypeDefinition):
"""A 3x3 double matrix"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'matrixDouble3x3')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 116, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element row1 uses Python identifier row1
__row1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row1'), 'row1', '__httpeuclid_esa_orgschemabasdtd_matrixDouble3x3_row1', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 121, 3), )
row1 = property(__row1.value, __row1.set, None, None)
# Element row2 uses Python identifier row2
__row2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row2'), 'row2', '__httpeuclid_esa_orgschemabasdtd_matrixDouble3x3_row2', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 122, 3), )
row2 = property(__row2.value, __row2.set, None, None)
# Element row3 uses Python identifier row3
__row3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row3'), 'row3', '__httpeuclid_esa_orgschemabasdtd_matrixDouble3x3_row3', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 123, 3), )
row3 = property(__row3.value, __row3.set, None, None)
_ElementMap.update({
__row1.name() : __row1,
__row2.name() : __row2,
__row3.name() : __row3
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', u'matrixDouble3x3', matrixDouble3x3)
# Complex type {http://euclid.esa.org/schema/bas/dtd}matrixDouble6x6 with content type ELEMENT_ONLY
class matrixDouble6x6 (pyxb.binding.basis.complexTypeDefinition):
"""A 6x6 double matrix"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'matrixDouble6x6')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 126, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element row1 uses Python identifier row1
__row1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row1'), 'row1', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 131, 3), )
row1 = property(__row1.value, __row1.set, None, None)
# Element row2 uses Python identifier row2
__row2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row2'), 'row2', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 132, 3), )
row2 = property(__row2.value, __row2.set, None, None)
# Element row3 uses Python identifier row3
__row3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row3'), 'row3', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 133, 3), )
row3 = property(__row3.value, __row3.set, None, None)
# Element row4 uses Python identifier row4
__row4 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row4'), 'row4', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row4', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 134, 3), )
row4 = property(__row4.value, __row4.set, None, None)
# Element row5 uses Python identifier row5
__row5 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row5'), 'row5', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row5', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 135, 3), )
row5 = property(__row5.value, __row5.set, None, None)
# Element row6 uses Python identifier row6
__row6 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'row6'), 'row6', '__httpeuclid_esa_orgschemabasdtd_matrixDouble6x6_row6', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 136, 3), )
row6 = property(__row6.value, __row6.set, None, None)
_ElementMap.update({
__row1.name() : __row1,
__row2.name() : __row2,
__row3.name() : __row3,
__row4.name() : __row4,
__row5.name() : __row5,
__row6.name() : __row6
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', u'matrixDouble6x6', matrixDouble6x6)
# Complex type {http://euclid.esa.org/schema/bas/dtd}doubleUnit with content type ELEMENT_ONLY
class doubleUnit (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {http://euclid.esa.org/schema/bas/dtd}doubleUnit with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'doubleUnit')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 269, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element Value uses Python identifier Value
__Value = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value'), 'Value', '__httpeuclid_esa_orgschemabasdtd_doubleUnit_Value', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 271, 3), )
Value = property(__Value.value, __Value.set, None, None)
# Element Unit uses Python identifier Unit
__Unit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Unit'), 'Unit', '__httpeuclid_esa_orgschemabasdtd_doubleUnit_Unit', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 272, 3), )
Unit = property(__Unit.value, __Unit.set, None, None)
_ElementMap.update({
__Value.name() : __Value,
__Unit.name() : __Unit
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', u'doubleUnit', doubleUnit)
# Complex type {http://euclid.esa.org/schema/bas/dtd}curve2Type with content type ELEMENT_ONLY
class curve2Type (pyxb.binding.basis.complexTypeDefinition):
"""A curve in 2-D space, defined by its end points and a shape attribute (default: line or great circle)."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'curve2Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 240, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element P1 uses Python identifier P1
__P1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'P1'), 'P1', '__httpeuclid_esa_orgschemabasdtd_curve2Type_P1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 245, 3), )
P1 = property(__P1.value, __P1.set, None, None)
# Element P2 uses Python identifier P2
__P2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'P2'), 'P2', '__httpeuclid_esa_orgschemabasdtd_curve2Type_P2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 246, 3), )
P2 = property(__P2.value, __P2.set, None, None)
# Attribute CurveShape uses Python identifier CurveShape
__CurveShape = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CurveShape'), 'CurveShape', '__httpeuclid_esa_orgschemabasdtd_curve2Type_CurveShape', curveShape)
__CurveShape._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 248, 2)
__CurveShape._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 248, 2)
CurveShape = property(__CurveShape.value, __CurveShape.set, None, None)
_ElementMap.update({
__P1.name() : __P1,
__P2.name() : __P2
})
_AttributeMap.update({
__CurveShape.name() : __CurveShape
})
Namespace.addCategoryObject('typeBinding', u'curve2Type', curve2Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}curve3Type with content type ELEMENT_ONLY
class curve3Type (pyxb.binding.basis.complexTypeDefinition):
"""A curve in 3-D space, defined by its end points and a shape attribute (default: line or great circle)."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'curve3Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 250, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element P1 uses Python identifier P1
__P1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'P1'), 'P1', '__httpeuclid_esa_orgschemabasdtd_curve3Type_P1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 255, 3), )
P1 = property(__P1.value, __P1.set, None, None)
# Element P2 uses Python identifier P2
__P2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'P2'), 'P2', '__httpeuclid_esa_orgschemabasdtd_curve3Type_P2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 256, 3), )
P2 = property(__P2.value, __P2.set, None, None)
# Attribute CurveShape uses Python identifier CurveShape
__CurveShape = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CurveShape'), 'CurveShape', '__httpeuclid_esa_orgschemabasdtd_curve3Type_CurveShape', curveShape)
__CurveShape._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 258, 2)
__CurveShape._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 258, 2)
CurveShape = property(__CurveShape.value, __CurveShape.set, None, None)
_ElementMap.update({
__P1.name() : __P1,
__P2.name() : __P2
})
_AttributeMap.update({
__CurveShape.name() : __CurveShape
})
Namespace.addCategoryObject('typeBinding', u'curve3Type', curve3Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}array2D with content type ELEMENT_ONLY
class array2D (pyxb.binding.basis.complexTypeDefinition):
"""A 2D array with X axis (specific unit), Y axis (specific unit), listOf2Double, optionnally size of the array is provided. This structure is used for describing Mission parameter database."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'array2D')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 157, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element SizeOfArray uses Python identifier SizeOfArray
__SizeOfArray = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SizeOfArray'), 'SizeOfArray', '__httpeuclid_esa_orgschemabasdtd_array2D_SizeOfArray', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 162, 3), )
SizeOfArray = property(__SizeOfArray.value, __SizeOfArray.set, None, None)
# Element PairedValues uses Python identifier PairedValues
__PairedValues = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PairedValues'), 'PairedValues', '__httpeuclid_esa_orgschemabasdtd_array2D_PairedValues', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 163, 3), )
PairedValues = property(__PairedValues.value, __PairedValues.set, None, None)
# Attribute Xunit uses Python identifier Xunit
__Xunit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Xunit'), 'Xunit', '__httpeuclid_esa_orgschemabasdtd_array2D_Xunit', _ImportedBinding_euclid_dm__utd.unit, required=True)
__Xunit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 165, 2)
__Xunit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 165, 2)
Xunit = property(__Xunit.value, __Xunit.set, None, None)
# Attribute Yunit uses Python identifier Yunit
__Yunit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Yunit'), 'Yunit', '__httpeuclid_esa_orgschemabasdtd_array2D_Yunit', _ImportedBinding_euclid_dm__utd.unit, required=True)
__Yunit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 166, 2)
__Yunit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 166, 2)
Yunit = property(__Yunit.value, __Yunit.set, None, None)
_ElementMap.update({
__SizeOfArray.name() : __SizeOfArray,
__PairedValues.name() : __PairedValues
})
_AttributeMap.update({
__Xunit.name() : __Xunit,
__Yunit.name() : __Yunit
})
Namespace.addCategoryObject('typeBinding', u'array2D', array2D)
# Complex type {http://euclid.esa.org/schema/bas/dtd}array3D with content type ELEMENT_ONLY
class array3D (pyxb.binding.basis.complexTypeDefinition):
"""A 3D array with X axis (specific unit), Y axis (specific unit), Z axis (specific unit) listOf3Double, optionnally size of the array is provided. This structure is used for describing Mission parameter database."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'array3D')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 168, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element SizeOfArray uses Python identifier SizeOfArray
__SizeOfArray = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SizeOfArray'), 'SizeOfArray', '__httpeuclid_esa_orgschemabasdtd_array3D_SizeOfArray', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 173, 3), )
SizeOfArray = property(__SizeOfArray.value, __SizeOfArray.set, None, None)
# Element TripletValues uses Python identifier TripletValues
__TripletValues = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'TripletValues'), 'TripletValues', '__httpeuclid_esa_orgschemabasdtd_array3D_TripletValues', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 174, 3), )
TripletValues = property(__TripletValues.value, __TripletValues.set, None, None)
# Attribute Xunit uses Python identifier Xunit
__Xunit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Xunit'), 'Xunit', '__httpeuclid_esa_orgschemabasdtd_array3D_Xunit', _ImportedBinding_euclid_dm__utd.unit, required=True)
__Xunit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 176, 2)
__Xunit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 176, 2)
Xunit = property(__Xunit.value, __Xunit.set, None, None)
# Attribute Yunit uses Python identifier Yunit
__Yunit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Yunit'), 'Yunit', '__httpeuclid_esa_orgschemabasdtd_array3D_Yunit', _ImportedBinding_euclid_dm__utd.unit, required=True)
__Yunit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 177, 2)
__Yunit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 177, 2)
Yunit = property(__Yunit.value, __Yunit.set, None, None)
# Attribute Zunit uses Python identifier Zunit
__Zunit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'Zunit'), 'Zunit', '__httpeuclid_esa_orgschemabasdtd_array3D_Zunit', _ImportedBinding_euclid_dm__utd.unit, required=True)
__Zunit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 178, 2)
__Zunit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 178, 2)
Zunit = property(__Zunit.value, __Zunit.set, None, None)
_ElementMap.update({
__SizeOfArray.name() : __SizeOfArray,
__TripletValues.name() : __TripletValues
})
_AttributeMap.update({
__Xunit.name() : __Xunit,
__Yunit.name() : __Yunit,
__Zunit.name() : __Zunit
})
Namespace.addCategoryObject('typeBinding', u'array3D', array3D)
# Complex type {http://euclid.esa.org/schema/bas/dtd}double1Type with content type SIMPLE
class double1Type (pyxb.binding.basis.complexTypeDefinition):
"""A double with single unit attribute that could be any of the standard unit defined in bas/."""
_TypeDefinition = pyxb.binding.datatypes.double
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'double1Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 180, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.double
# Attribute unit uses Python identifier unit
__unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__httpeuclid_esa_orgschemabasdtd_double1Type_unit', _ImportedBinding_euclid_dm__utd.unit)
__unit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 186, 4)
__unit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 186, 4)
unit = property(__unit.value, __unit.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__unit.name() : __unit
})
Namespace.addCategoryObject('typeBinding', u'double1Type', double1Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}double2Type with content type ELEMENT_ONLY
class double2Type (pyxb.binding.basis.complexTypeDefinition):
"""A vector of 2 doubles ; components are separated. Each component of the vector should have the same unit. This unit could be a velocity unit, time unit, position unit, angle unit, spectral unit depending on the semantics of the parameter."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'double2Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 190, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element C1 uses Python identifier C1
__C1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'C1'), 'C1', '__httpeuclid_esa_orgschemabasdtd_double2Type_C1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 195, 3), )
C1 = property(__C1.value, __C1.set, None, None)
# Element C2 uses Python identifier C2
__C2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'C2'), 'C2', '__httpeuclid_esa_orgschemabasdtd_double2Type_C2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 196, 3), )
C2 = property(__C2.value, __C2.set, None, None)
# Attribute CoordUnit uses Python identifier CoordUnit
__CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasdtd_double2Type_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
__CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 198, 2)
__CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 198, 2)
CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
_ElementMap.update({
__C1.name() : __C1,
__C2.name() : __C2
})
_AttributeMap.update({
__CoordUnit.name() : __CoordUnit
})
Namespace.addCategoryObject('typeBinding', u'double2Type', double2Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}double3Type with content type ELEMENT_ONLY
class double3Type (pyxb.binding.basis.complexTypeDefinition):
"""A vector of 3 doubles with separated components. Each component of the vector should have the same unit. This unit could be a velocity unit, time unit, position unit, angle unit, spectral unit depending on the semantics of the parameter."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'double3Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 200, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element C1 uses Python identifier C1
__C1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'C1'), 'C1', '__httpeuclid_esa_orgschemabasdtd_double3Type_C1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 205, 3), )
C1 = property(__C1.value, __C1.set, None, None)
# Element C2 uses Python identifier C2
__C2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'C2'), 'C2', '__httpeuclid_esa_orgschemabasdtd_double3Type_C2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 206, 3), )
C2 = property(__C2.value, __C2.set, None, None)
# Element C3 uses Python identifier C3
__C3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'C3'), 'C3', '__httpeuclid_esa_orgschemabasdtd_double3Type_C3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 207, 3), )
C3 = property(__C3.value, __C3.set, None, None)
# Attribute CoordUnit uses Python identifier CoordUnit
__CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasdtd_double3Type_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
__CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 209, 2)
__CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 209, 2)
CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
_ElementMap.update({
__C1.name() : __C1,
__C2.name() : __C2,
__C3.name() : __C3
})
_AttributeMap.update({
__CoordUnit.name() : __CoordUnit
})
Namespace.addCategoryObject('typeBinding', u'double3Type', double3Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}double4Type with content type ELEMENT_ONLY
class double4Type (pyxb.binding.basis.complexTypeDefinition):
"""A vector of 4 doubles (2x2 matrix). Each component of the vector should have the same unit. This unit could be a velocity unit, time unit, position unit, angle unit, spectral unit depending on the semantics of the parameter."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'double4Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 211, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element M11 uses Python identifier M11
__M11 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M11'), 'M11', '__httpeuclid_esa_orgschemabasdtd_double4Type_M11', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 216, 3), )
M11 = property(__M11.value, __M11.set, None, None)
# Element M12 uses Python identifier M12
__M12 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M12'), 'M12', '__httpeuclid_esa_orgschemabasdtd_double4Type_M12', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 217, 3), )
M12 = property(__M12.value, __M12.set, None, None)
# Element M21 uses Python identifier M21
__M21 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M21'), 'M21', '__httpeuclid_esa_orgschemabasdtd_double4Type_M21', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 218, 3), )
M21 = property(__M21.value, __M21.set, None, None)
# Element M22 uses Python identifier M22
__M22 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M22'), 'M22', '__httpeuclid_esa_orgschemabasdtd_double4Type_M22', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 219, 3), )
M22 = property(__M22.value, __M22.set, None, None)
# Attribute MijUnit uses Python identifier MijUnit
__MijUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'MijUnit'), 'MijUnit', '__httpeuclid_esa_orgschemabasdtd_double4Type_MijUnit', _ImportedBinding_euclid_dm__utd.unit)
__MijUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 221, 2)
__MijUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 221, 2)
MijUnit = property(__MijUnit.value, __MijUnit.set, None, None)
_ElementMap.update({
__M11.name() : __M11,
__M12.name() : __M12,
__M21.name() : __M21,
__M22.name() : __M22
})
_AttributeMap.update({
__MijUnit.name() : __MijUnit
})
Namespace.addCategoryObject('typeBinding', u'double4Type', double4Type)
# Complex type {http://euclid.esa.org/schema/bas/dtd}double9Type with content type ELEMENT_ONLY
class double9Type (pyxb.binding.basis.complexTypeDefinition):
"""A vector of 9 doubles (3x3 matrix). Each component of the vector should have the same unit. This unit could be a velocity unit, time unit, position unit, angle unit, spectral unit depending on the semantics of the parameter."""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'double9Type')
_XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 223, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element M11 uses Python identifier M11
__M11 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M11'), 'M11', '__httpeuclid_esa_orgschemabasdtd_double9Type_M11', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 228, 3), )
M11 = property(__M11.value, __M11.set, None, None)
# Element M12 uses Python identifier M12
__M12 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M12'), 'M12', '__httpeuclid_esa_orgschemabasdtd_double9Type_M12', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 229, 3), )
M12 = property(__M12.value, __M12.set, None, None)
# Element M13 uses Python identifier M13
__M13 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M13'), 'M13', '__httpeuclid_esa_orgschemabasdtd_double9Type_M13', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 230, 3), )
M13 = property(__M13.value, __M13.set, None, None)
# Element M21 uses Python identifier M21
__M21 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M21'), 'M21', '__httpeuclid_esa_orgschemabasdtd_double9Type_M21', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 231, 3), )
M21 = property(__M21.value, __M21.set, None, None)
# Element M22 uses Python identifier M22
__M22 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M22'), 'M22', '__httpeuclid_esa_orgschemabasdtd_double9Type_M22', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 232, 3), )
M22 = property(__M22.value, __M22.set, None, None)
# Element M23 uses Python identifier M23
__M23 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M23'), 'M23', '__httpeuclid_esa_orgschemabasdtd_double9Type_M23', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 233, 3), )
M23 = property(__M23.value, __M23.set, None, None)
# Element M31 uses Python identifier M31
__M31 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M31'), 'M31', '__httpeuclid_esa_orgschemabasdtd_double9Type_M31', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 234, 3), )
M31 = property(__M31.value, __M31.set, None, None)
# Element M32 uses Python identifier M32
__M32 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M32'), 'M32', '__httpeuclid_esa_orgschemabasdtd_double9Type_M32', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 235, 3), )
M32 = property(__M32.value, __M32.set, None, None)
# Element M33 uses Python identifier M33
__M33 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'M33'), 'M33', '__httpeuclid_esa_orgschemabasdtd_double9Type_M33', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 236, 3), )
M33 = property(__M33.value, __M33.set, None, None)
# Attribute MijUnit uses Python identifier MijUnit
__MijUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'MijUnit'), 'MijUnit', '__httpeuclid_esa_orgschemabasdtd_double9Type_MijUnit', _ImportedBinding_euclid_dm__utd.unit)
__MijUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 238, 2)
__MijUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 238, 2)
MijUnit = property(__MijUnit.value, __MijUnit.set, None, None)
_ElementMap.update({
__M11.name() : __M11,
__M12.name() : __M12,
__M13.name() : __M13,
__M21.name() : __M21,
__M22.name() : __M22,
__M23.name() : __M23,
__M31.name() : __M31,
__M32.name() : __M32,
__M33.name() : __M33
})
_AttributeMap.update({
__MijUnit.name() : __MijUnit
})
Namespace.addCategoryObject('typeBinding', u'double9Type', double9Type)
matrixDouble3x3._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row1'), listOf3Double, scope=matrixDouble3x3, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 121, 3)))
matrixDouble3x3._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row2'), listOf3Double, scope=matrixDouble3x3, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 122, 3)))
matrixDouble3x3._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row3'), listOf3Double, scope=matrixDouble3x3, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 123, 3)))
def _BuildAutomaton ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=3L, max=3L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 120, 2))
counters.add(cc_0)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble3x3._UseForTag(pyxb.namespace.ExpandedName(None, u'row1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 121, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble3x3._UseForTag(pyxb.namespace.ExpandedName(None, u'row2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 122, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(matrixDouble3x3._UseForTag(pyxb.namespace.ExpandedName(None, u'row3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 123, 3))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
matrixDouble3x3._Automaton = _BuildAutomaton()
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row1'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 131, 3)))
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row2'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 132, 3)))
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row3'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 133, 3)))
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row4'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 134, 3)))
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row5'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 135, 3)))
matrixDouble6x6._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'row6'), listOf6Double, scope=matrixDouble6x6, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 136, 3)))
def _BuildAutomaton_ ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_
del _BuildAutomaton_
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 131, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 132, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 133, 3))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row4')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 134, 3))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row5')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 135, 3))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = set()
symbol = pyxb.binding.content.ElementUse(matrixDouble6x6._UseForTag(pyxb.namespace.ExpandedName(None, u'row6')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 136, 3))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
]))
st_4._set_transitionSet(transitions)
transitions = []
st_5._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
matrixDouble6x6._Automaton = _BuildAutomaton_()
doubleUnit._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value'), pyxb.binding.datatypes.double, scope=doubleUnit, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 271, 3)))
doubleUnit._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Unit'), _ImportedBinding_euclid_dm__utd.unit, scope=doubleUnit, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 272, 3)))
def _BuildAutomaton_2 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_2
del _BuildAutomaton_2
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(doubleUnit._UseForTag(pyxb.namespace.ExpandedName(None, u'Value')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 271, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(doubleUnit._UseForTag(pyxb.namespace.ExpandedName(None, u'Unit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 272, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
doubleUnit._Automaton = _BuildAutomaton_2()
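# Only st_1 in _BuildAutomaton_2 carries a non-None final_update, so a doubleUnit whose
# content stops after <Value> is not in an accepting state: the element is complete only
# once both Value and Unit have been seen, in that order. The same convention
# (final_update left as None on intermediate states, set to an empty set on the last
# required child) recurs in every builder in this module.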
curve2Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'P1'), double2Type, scope=curve2Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 245, 3)))
curve2Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'P2'), double2Type, scope=curve2Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 246, 3)))
def _BuildAutomaton_3 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_3
del _BuildAutomaton_3
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(curve2Type._UseForTag(pyxb.namespace.ExpandedName(None, u'P1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 245, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(curve2Type._UseForTag(pyxb.namespace.ExpandedName(None, u'P2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 246, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
curve2Type._Automaton = _BuildAutomaton_3()
curve3Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'P1'), double3Type, scope=curve3Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 255, 3)))
curve3Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'P2'), double3Type, scope=curve3Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 256, 3)))
def _BuildAutomaton_4 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_4
del _BuildAutomaton_4
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(curve3Type._UseForTag(pyxb.namespace.ExpandedName(None, u'P1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 255, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(curve3Type._UseForTag(pyxb.namespace.ExpandedName(None, u'P2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 256, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
curve3Type._Automaton = _BuildAutomaton_4()
array2D._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SizeOfArray'), pyxb.binding.datatypes.long, scope=array2D, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 162, 3)))
array2D._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PairedValues'), listOf2Double, scope=array2D, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 163, 3)))
def _BuildAutomaton_5 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_5
del _BuildAutomaton_5
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 162, 3))
counters.add(cc_0)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(array2D._UseForTag(pyxb.namespace.ExpandedName(None, u'SizeOfArray')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 162, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(array2D._UseForTag(pyxb.namespace.ExpandedName(None, u'PairedValues')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 163, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
array2D._Automaton = _BuildAutomaton_5()
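# In _BuildAutomaton_5 the counter cc_0 (min=0, max=1) makes SizeOfArray optional, and
# st_1 is also flagged is_initial=True so an instance may begin directly with
# PairedValues. (The 0L literal marks this as Python-2-era generated code.) Two
# instance fragments this content model should accept, shown for illustration only:
#
#   <array2D><SizeOfArray>2</SizeOfArray><PairedValues>1.0 2.0 3.0 4.0</PairedValues></array2D>
#   <array2D><PairedValues>1.0 2.0</PairedValues></array2D>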
array3D._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SizeOfArray'), pyxb.binding.datatypes.long, scope=array3D, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 173, 3)))
array3D._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'TripletValues'), listOf3Double, scope=array3D, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 174, 3)))
def _BuildAutomaton_6 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_6
del _BuildAutomaton_6
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 173, 3))
counters.add(cc_0)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(array3D._UseForTag(pyxb.namespace.ExpandedName(None, u'SizeOfArray')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 173, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(array3D._UseForTag(pyxb.namespace.ExpandedName(None, u'TripletValues')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 174, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
array3D._Automaton = _BuildAutomaton_6()
double2Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'C1'), double1Type, scope=double2Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 195, 3)))
double2Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'C2'), double1Type, scope=double2Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 196, 3)))
def _BuildAutomaton_7 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_7
del _BuildAutomaton_7
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(double2Type._UseForTag(pyxb.namespace.ExpandedName(None, u'C1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 195, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(double2Type._UseForTag(pyxb.namespace.ExpandedName(None, u'C2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 196, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
double2Type._Automaton = _BuildAutomaton_7()
double3Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'C1'), double1Type, scope=double3Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 205, 3)))
double3Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'C2'), double1Type, scope=double3Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 206, 3)))
double3Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'C3'), double1Type, scope=double3Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 207, 3)))
def _BuildAutomaton_8 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_8
del _BuildAutomaton_8
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(double3Type._UseForTag(pyxb.namespace.ExpandedName(None, u'C1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 205, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(double3Type._UseForTag(pyxb.namespace.ExpandedName(None, u'C2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 206, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
symbol = pyxb.binding.content.ElementUse(double3Type._UseForTag(pyxb.namespace.ExpandedName(None, u'C3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 207, 3))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
]))
st_1._set_transitionSet(transitions)
transitions = []
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
double3Type._Automaton = _BuildAutomaton_8()
double4Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M11'), pyxb.binding.datatypes.double, scope=double4Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 216, 3)))
double4Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M12'), pyxb.binding.datatypes.double, scope=double4Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 217, 3)))
double4Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M21'), pyxb.binding.datatypes.double, scope=double4Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 218, 3)))
double4Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M22'), pyxb.binding.datatypes.double, scope=double4Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 219, 3)))
def _BuildAutomaton_9 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_9
del _BuildAutomaton_9
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(double4Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M11')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 216, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(double4Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M12')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 217, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(double4Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M21')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 218, 3))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
symbol = pyxb.binding.content.ElementUse(double4Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M22')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 219, 3))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
st_3._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
double4Type._Automaton = _BuildAutomaton_9()
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M11'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 228, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M12'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 229, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M13'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 230, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M21'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 231, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M22'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 232, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M23'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 233, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M31'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 234, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M32'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 235, 3)))
double9Type._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'M33'), pyxb.binding.datatypes.double, scope=double9Type, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 236, 3)))
def _BuildAutomaton_10 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_10
del _BuildAutomaton_10
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M11')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 228, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M12')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 229, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M13')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 230, 3))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M21')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 231, 3))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M22')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 232, 3))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M23')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 233, 3))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M31')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 234, 3))
st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = None
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M32')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 235, 3))
st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = set()
symbol = pyxb.binding.content.ElementUse(double9Type._UseForTag(pyxb.namespace.ExpandedName(None, u'M33')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/dtd/euc-test-dtd.xsd', 236, 3))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
]))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_7, [
]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
]))
st_7._set_transitionSet(transitions)
transitions = []
st_8._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
double9Type._Automaton = _BuildAutomaton_10()
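# End of the automaton definitions in this binding module. A minimal usage sketch,
# assuming the module is importable as `bindings` and that PyXB generated its usual
# keyword constructors and document helpers (the specific type and accessor names below
# are illustrative, not verified against this schema):
#
#   import bindings
#   p = bindings.double2Type(C1=1.0, C2=2.0)
#   xml = p.toxml('utf-8')                        # serialisation validates against the FAC
#   roundtrip = bindings.CreateFromDocument(xml)  # parsing walks the same automaton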
| 60.656099 | 339 | 0.767043 | 11,317 | 88,012 | 5.781568 | 0.046214 | 0.040517 | 0.041571 | 0.061623 | 0.858826 | 0.843894 | 0.812532 | 0.794712 | 0.786551 | 0.739814 | 0 | 0.026966 | 0.10886 | 88,012 | 1,450 | 340 | 60.697931 | 0.80727 | 0.07474 | 0 | 0.502756 | 1 | 0.185226 | 0.284167 | 0.24756 | 0 | 0 | 0 | 0 | 0 | 0
| null | null | 0 | 0.034179 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | | 0 | 8 |
92d79b7cfb1111cf79a5b879bdc0bdfbaae38d76 | 218,204 | py | Python | services/core/src/oci_cli_blockstorage/generated/blockstorage_cli.py | andrewtvuong/oci-cli | 7673a808613308a4899c7026964fa2383c30c397 | ["Apache-2.0"] | null | null | null |
services/core/src/oci_cli_blockstorage/generated/blockstorage_cli.py | andrewtvuong/oci-cli | 7673a808613308a4899c7026964fa2383c30c397 | ["Apache-2.0"] | null | null | null |
services/core/src/oci_cli_blockstorage/generated/blockstorage_cli.py | andrewtvuong/oci-cli | 7673a808613308a4899c7026964fa2383c30c397 | ["Apache-2.0"] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
from __future__ import print_function
import click
import oci # noqa: F401
import six # noqa: F401
import sys # noqa: F401
from oci_cli import cli_constants # noqa: F401
from oci_cli import cli_util
from oci_cli import json_skeleton_utils
from oci_cli import custom_types # noqa: F401
from oci_cli.aliasing import CommandGroupWithAlias
from oci_cli_core.generated import core_service_cli
@click.command(cli_util.override('blockstorage_root_group.command_name', 'blockstorage'), cls=CommandGroupWithAlias, help=cli_util.override('blockstorage_root_group.help', """API covering the [Networking](/iaas/Content/Network/Concepts/overview.htm),
[Compute](/iaas/Content/Compute/Concepts/computeoverview.htm), and
[Block Volume](/iaas/Content/Block/Concepts/overview.htm) services. Use this API
to manage resources such as virtual cloud networks (VCNs), compute instances, and
block storage volumes.
"""), short_help=cli_util.override('blockstorage_root_group.short_help', """Core Services API"""))
@cli_util.help_option_group
def blockstorage_root_group():
pass
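# Every group below follows the same recipe: cli_util.override() lets hand-written
# customization modules replace the generated command name or help text at load time,
# CommandGroupWithAlias is a click Group subclass that lets the group be reached under
# alternate names, and the group body itself is just `pass` because subcommands are
# attached later with add_command().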
@click.command(cli_util.override('boot_volume_kms_key_group.command_name', 'boot-volume-kms-key'), cls=CommandGroupWithAlias, help="""Kms key id associated with this volume.""")
@cli_util.help_option_group
def boot_volume_kms_key_group():
pass
@click.command(cli_util.override('volume_group.command_name', 'volume'), cls=CommandGroupWithAlias, help="""A detachable block volume device that allows you to dynamically expand the storage capacity of an instance. For more information, see [Overview of Cloud Volume Storage].
To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def volume_group():
pass
@click.command(cli_util.override('boot_volume_backup_group.command_name', 'boot-volume-backup'), cls=CommandGroupWithAlias, help="""A point-in-time copy of a boot volume that can then be used to create a new boot volume or recover a boot volume. For more information, see [Overview of Boot Volume Backups] To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def boot_volume_backup_group():
pass
@click.command(cli_util.override('boot_volume_group.command_name', 'boot-volume'), cls=CommandGroupWithAlias, help="""A detachable boot volume device that contains the image used to boot a Compute instance. For more information, see [Overview of Boot Volumes].
To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def boot_volume_group():
pass
@click.command(cli_util.override('volume_backup_group.command_name', 'volume-backup'), cls=CommandGroupWithAlias, help="""A point-in-time copy of a volume that can then be used to create a new block volume or recover a block volume. For more information, see [Overview of Cloud Volume Storage].
To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def volume_backup_group():
pass
@click.command(cli_util.override('volume_group_backup_group.command_name', 'volume-group-backup'), cls=CommandGroupWithAlias, help="""A point-in-time copy of a volume group that can then be used to create a new volume group or restore a volume group. For more information, see [Volume Groups].
To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def volume_group_backup_group():
pass
@click.command(cli_util.override('volume_backup_policy_assignment_group.command_name', 'volume-backup-policy-assignment'), cls=CommandGroupWithAlias, help="""Specifies that a particular volume backup policy is assigned to an asset such as a volume.""")
@cli_util.help_option_group
def volume_backup_policy_assignment_group():
pass
@click.command(cli_util.override('volume_group_group.command_name', 'volume-group'), cls=CommandGroupWithAlias, help="""Specifies a volume group which is a collection of volumes. For more information, see [Volume Groups].
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def volume_group_group():
pass
@click.command(cli_util.override('volume_backup_policy_group.command_name', 'volume-backup-policy'), cls=CommandGroupWithAlias, help="""A policy for automatically creating volume backups according to a recurring schedule. Has a set of one or more schedules that control when and how backups are created.
**Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.""")
@cli_util.help_option_group
def volume_backup_policy_group():
pass
@click.command(cli_util.override('volume_kms_key_group.command_name', 'volume-kms-key'), cls=CommandGroupWithAlias, help="""The KMS key OCID associated with this volume.""")
@cli_util.help_option_group
def volume_kms_key_group():
pass
core_service_cli.core_service_group.add_command(blockstorage_root_group)
blockstorage_root_group.add_command(boot_volume_kms_key_group)
blockstorage_root_group.add_command(volume_group)
blockstorage_root_group.add_command(boot_volume_backup_group)
blockstorage_root_group.add_command(boot_volume_group)
blockstorage_root_group.add_command(volume_backup_group)
blockstorage_root_group.add_command(volume_group_backup_group)
blockstorage_root_group.add_command(volume_backup_policy_assignment_group)
blockstorage_root_group.add_command(volume_group_group)
blockstorage_root_group.add_command(volume_backup_policy_group)
blockstorage_root_group.add_command(volume_kms_key_group)
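# The add_command() calls above assemble the command tree:
#   core services -> blockstorage -> {volume, boot-volume, volume-backup,
#                                     volume-group, volume-backup-policy, ...}
# On the command line this surfaces as invocations such as the following (illustrative;
# the short alias depends on the aliasing customizations shipped with the CLI):
#
#   oci bv volume change-compartment --volume-id <ocid> --compartment-id <ocid>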
@boot_volume_backup_group.command(name=cli_util.override('change_boot_volume_backup_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a boot volume backup""")
@cli_util.option('--boot-volume-backup-id', required=True, help=u"""The OCID of the boot volume backup.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_boot_volume_backup_compartment(ctx, from_json, boot_volume_backup_id, compartment_id):
if isinstance(boot_volume_backup_id, six.string_types) and len(boot_volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-backup-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_boot_volume_backup_compartment(
boot_volume_backup_id=boot_volume_backup_id,
change_boot_volume_backup_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
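# The handler above is the template every change-compartment command in this module
# follows: reject a blank id, attach an opc-request-id, build the Blockstorage client
# from the CLI context, call the SDK operation with a details dict holding the new
# compartmentId, and render the response as JSON. A hypothetical invocation (OCIDs
# shortened) would look like:
#
#   oci blockstorage boot-volume-backup change-compartment \
#       --boot-volume-backup-id ocid1.bootvolumebackup.oc1..example \
#       --compartment-id ocid1.compartment.oc1..example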
@boot_volume_group.command(name=cli_util.override('change_boot_volume_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a boot volume""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_boot_volume_compartment(ctx, from_json, boot_volume_id, compartment_id):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_boot_volume_compartment(
boot_volume_id=boot_volume_id,
change_boot_volume_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_group.command(name=cli_util.override('change_volume_backup_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a volume backup""")
@cli_util.option('--volume-backup-id', required=True, help=u"""The OCID of the volume backup.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_volume_backup_compartment(ctx, from_json, volume_backup_id, compartment_id):
if isinstance(volume_backup_id, six.string_types) and len(volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-backup-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_volume_backup_compartment(
volume_backup_id=volume_backup_id,
change_volume_backup_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group.command(name=cli_util.override('change_volume_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a volume""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_volume_compartment(ctx, from_json, volume_id, compartment_id):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_volume_compartment(
volume_id=volume_id,
change_volume_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_backup_group.command(name=cli_util.override('change_volume_group_backup_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a volume group backup""")
@cli_util.option('--volume-group-backup-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group backup.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_volume_group_backup_compartment(ctx, from_json, volume_group_backup_id, compartment_id):
if isinstance(volume_group_backup_id, six.string_types) and len(volume_group_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-backup-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_volume_group_backup_compartment(
volume_group_backup_id=volume_group_backup_id,
change_volume_group_backup_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_group.command(name=cli_util.override('change_volume_group_compartment.command_name', 'change-compartment'), help=u"""Change the compartment of a volume group""")
@cli_util.option('--volume-group-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the new compartment""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_volume_group_compartment(ctx, from_json, volume_group_id, compartment_id):
if isinstance(volume_group_id, six.string_types) and len(volume_group_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['compartmentId'] = compartment_id
client = cli_util.build_client('blockstorage', ctx)
result = client.change_volume_group_compartment(
volume_group_id=volume_group_id,
change_volume_group_compartment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_group.command(name=cli_util.override('copy_volume_backup.command_name', 'copy'), help=u"""Creates a volume backup copy in specified region. For general information about volume backups, see [Overview of Block Volume Service Backups]""")
@cli_util.option('--volume-backup-id', required=True, help=u"""The OCID of the volume backup.""")
@cli_util.option('--destination-region', required=True, help=u"""The name of the destination region.
Example: `us-ashburn-1`""")
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume backup. Does not have to be unique and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeBackup'})
@cli_util.wrap_exceptions
def copy_volume_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_backup_id, destination_region, display_name):
if isinstance(volume_backup_id, six.string_types) and len(volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-backup-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
details = {}
details['destinationRegion'] = destination_region
if display_name is not None:
details['displayName'] = display_name
client = cli_util.build_client('blockstorage', ctx)
result = client.copy_volume_backup(
volume_backup_id=volume_backup_id,
copy_volume_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_backup') and callable(getattr(client, 'get_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
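# copy_volume_backup layers the --wait-for-state machinery on top of the same template:
# when requested, it polls get_volume_backup through oci.wait_until until
# lifecycle_state matches, exits with status 2 if --max-wait-seconds elapses, and
# re-raises (exit 1) on any other error, matching the option help above. An
# illustrative cross-region copy that blocks until the backup is usable (values are
# placeholders):
#
#   oci blockstorage volume-backup copy \
#       --volume-backup-id ocid1.volumebackup.oc1..example \
#       --destination-region us-ashburn-1 \
#       --wait-for-state AVAILABLE --max-wait-seconds 1800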
@boot_volume_group.command(name=cli_util.override('create_boot_volume.command_name', 'create'), help=u"""Creates a new boot volume in the specified compartment from an existing boot volume or a boot volume backup. For general information about boot volumes, see [Boot Volumes]. You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the boot volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the boot volume.""")
@cli_util.option('--source-details', required=True, type=custom_types.CLI_COMPLEX_TYPE, help=u"""Specifies the boot volume source details for a new boot volume. The volume source is either another boot volume in the same availability domain or a boot volume backup. This is a mandatory field for a boot volume.""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the boot volume backup policy to assign to the newly created boot volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the boot volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'BootVolumeSourceDetails'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'BootVolumeSourceDetails'}}, output_type={'module': 'core', 'class': 'BootVolume'})
@cli_util.wrap_exceptions
def create_boot_volume(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = cli_util.parse_json_parameter("source_details", source_details)
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
client = cli_util.build_client('blockstorage', ctx)
result = client.create_boot_volume(
create_boot_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume') and callable(getattr(client, 'get_boot_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
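# create_boot_volume takes the volume source as a raw complex parameter: the
# --source-details JSON is run through cli_util.parse_json_parameter and sent as a
# BootVolumeSourceDetails payload, so the caller supplies the type discriminator
# themselves. An illustrative restore-from-backup invocation (OCIDs are placeholders):
#
#   oci blockstorage boot-volume create \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..example \
#       --source-details '{"type": "bootVolumeBackup", "id": "ocid1.bootvolumebackup.oc1..example"}'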
@boot_volume_group.command(name=cli_util.override('create_boot_volume_boot_volume_source_from_boot_volume_backup_details.command_name', 'create-boot-volume-boot-volume-source-from-boot-volume-backup-details'), help=u"""Creates a new boot volume in the specified compartment from an existing boot volume or a boot volume backup. For general information about boot volumes, see [Boot Volumes]. You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the boot volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the boot volume.""")
@cli_util.option('--source-details-id', required=True, help=u"""The OCID of the boot volume backup.""")
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the boot volume backup policy to assign to the newly created boot volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the boot volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'BootVolume'})
@cli_util.wrap_exceptions
def create_boot_volume_boot_volume_source_from_boot_volume_backup_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_id, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['id'] = source_details_id
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
details['sourceDetails']['type'] = 'bootVolumeBackup'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_boot_volume(
create_boot_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume') and callable(getattr(client, 'get_boot_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
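
# Example invocation (illustrative only): the OCIDs below are placeholders, and the
# `oci bv boot-volume` prefix assumes the usual registration of boot_volume_group elsewhere
# in this module. Restores a new boot volume from a boot volume backup and waits for it:
#
#   oci bv boot-volume create-boot-volume-boot-volume-source-from-boot-volume-backup-details \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..exampleuniqueID \
#       --source-details-id ocid1.bootvolumebackup.oc1..exampleuniqueID \
#       --wait-for-state AVAILABLE
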
@boot_volume_group.command(name=cli_util.override('create_boot_volume_boot_volume_source_from_boot_volume_details.command_name', 'create-boot-volume-boot-volume-source-from-boot-volume-details'), help=u"""Creates a new boot volume in the specified compartment from an existing boot volume or a boot volume backup. For general information about boot volumes, see [Boot Volumes]. You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the boot volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the boot volume.""")
@cli_util.option('--source-details-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the boot volume backup policy to assign to the newly created boot volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the boot volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'BootVolume'})
@cli_util.wrap_exceptions
def create_boot_volume_boot_volume_source_from_boot_volume_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_id, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['id'] = source_details_id
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
details['sourceDetails']['type'] = 'bootVolume'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_boot_volume(
create_boot_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume') and callable(getattr(client, 'get_boot_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@boot_volume_backup_group.command(name=cli_util.override('create_boot_volume_backup.command_name', 'create'), help=u"""Creates a new boot volume backup of the specified boot volume. For general information about boot volume backups, see [Overview of Boot Volume Backups]
When the request is received, the backup object is in a REQUEST_RECEIVED state. When the data is imaged, it goes into a CREATING state. After the backup is fully uploaded to the cloud, it goes into an AVAILABLE state.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume that needs to be backed up.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the boot volume backup. Does not have to be unique and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--type', type=custom_types.CliCaseInsensitiveChoice(["FULL", "INCREMENTAL"]), help=u"""The type of backup to create. If omitted, defaults to INCREMENTAL.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'BootVolumeBackup'})
@cli_util.wrap_exceptions
def create_boot_volume_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, boot_volume_id, defined_tags, display_name, freeform_tags, type):
kwargs = {}
details = {}
details['bootVolumeId'] = boot_volume_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if type is not None:
details['type'] = type
client = cli_util.build_client('blockstorage', ctx)
result = client.create_boot_volume_backup(
create_boot_volume_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume_backup') and callable(getattr(client, 'get_boot_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
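
# Example invocation (illustrative only; placeholder OCID, and the `oci bv boot-volume-backup`
# prefix assumes the usual registration of boot_volume_backup_group). Creates an incremental
# backup of a boot volume and waits until it is AVAILABLE:
#
#   oci bv boot-volume-backup create \
#       --boot-volume-id ocid1.bootvolume.oc1..exampleuniqueID \
#       --type INCREMENTAL \
#       --wait-for-state AVAILABLE
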
@volume_group.command(name=cli_util.override('create_volume.command_name', 'create'), help=u"""Creates a new volume in the specified compartment. Volumes can be created in sizes ranging from 50 GB (51200 MB) to 32 TB (33554432 MB), in 1 GB (1024 MB) increments. By default, volumes are 1 TB (1048576 MB). For general information about block volumes, see [Overview of Block Volume Service].
A volume and instance can be in separate compartments but must be in the same availability domain. For information about access control and compartments, see [Overview of the IAM Service]. For information about availability domains, see [Regions and Availability Domains]. To get a list of availability domains, use the `ListAvailabilityDomains` operation in the Identity and Access Management Service API.
You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume.""")
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the volume backup policy to assign to the newly created volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--size-in-mbs', type=click.INT, help=u"""The size of the volume in MBs. The value must be a multiple of 1024. This field is deprecated. Use sizeInGBs instead.""")
@cli_util.option('--source-details', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Specifies the volume source details for a new Block volume. The volume source is either another Block volume in the same availability domain or a Block volume backup. This is an optional field. If not specified or set to null, the new Block volume will be empty. When specified, the new Block volume will contain data from the source volume or backup.""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--volume-backup-id', help=u"""The OCID of the volume backup from which the data should be restored on the newly created volume. This field is deprecated. Use the sourceDetails field instead to specify the backup for the volume.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'VolumeSourceDetails'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'VolumeSourceDetails'}}, output_type={'module': 'core', 'class': 'Volume'})
@cli_util.wrap_exceptions
def create_volume(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs, size_in_mbs, source_details, volume_backup_id):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
if size_in_mbs is not None:
details['sizeInMBs'] = size_in_mbs
if source_details is not None:
details['sourceDetails'] = cli_util.parse_json_parameter("source_details", source_details)
if volume_backup_id is not None:
details['volumeBackupId'] = volume_backup_id
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume(
create_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume') and callable(getattr(client, 'get_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
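
# Example invocation (illustrative only; placeholder OCIDs, and the `oci bv volume` prefix
# assumes the usual registration of volume_group). --source-details takes the
# VolumeSourceDetails structure as JSON; the "type"/"id" keys below mirror the discriminator
# values set by the specialized create-volume-volume-source-* commands:
#
#   oci bv volume create \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..exampleuniqueID \
#       --size-in-gbs 100 \
#       --source-details '{"type": "volumeBackup", "id": "ocid1.volumebackup.oc1..exampleuniqueID"}'
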
@volume_group.command(name=cli_util.override('create_volume_volume_source_from_volume_details.command_name', 'create-volume-volume-source-from-volume-details'), help=u"""Creates a new volume in the specified compartment. Volumes can be created in sizes ranging from 50 GB (51200 MB) to 32 TB (33554432 MB), in 1 GB (1024 MB) increments. By default, volumes are 1 TB (1048576 MB). For general information about block volumes, see [Overview of Block Volume Service].
A volume and instance can be in separate compartments but must be in the same availability domain. For information about access control and compartments, see [Overview of the IAM Service]. For information about availability domains, see [Regions and Availability Domains]. To get a list of availability domains, use the `ListAvailabilityDomains` operation in the Identity and Access Management Service API.
You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume.""")
@cli_util.option('--source-details-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the volume backup policy to assign to the newly created volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--size-in-mbs', type=click.INT, help=u"""The size of the volume in MBs. The value must be a multiple of 1024. This field is deprecated. Use sizeInGBs instead.""")
@cli_util.option('--volume-backup-id', help=u"""The OCID of the volume backup from which the data should be restored on the newly created volume. This field is deprecated. Use the sourceDetails field instead to specify the backup for the volume.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'Volume'})
@cli_util.wrap_exceptions
def create_volume_volume_source_from_volume_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_id, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs, size_in_mbs, volume_backup_id):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['id'] = source_details_id
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
if size_in_mbs is not None:
details['sizeInMBs'] = size_in_mbs
if volume_backup_id is not None:
details['volumeBackupId'] = volume_backup_id
details['sourceDetails']['type'] = 'volume'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume(
create_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume') and callable(getattr(client, 'get_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_group.command(name=cli_util.override('create_volume_volume_source_from_volume_backup_details.command_name', 'create-volume-volume-source-from-volume-backup-details'), help=u"""Creates a new volume in the specified compartment. Volumes can be created in sizes ranging from 50 GB (51200 MB) to 32 TB (33554432 MB), in 1 GB (1024 MB) increments. By default, volumes are 1 TB (1048576 MB). For general information about block volumes, see [Overview of Block Volume Service].
A volume and instance can be in separate compartments but must be in the same availability domain. For information about access control and compartments, see [Overview of the IAM Service]. For information about availability domains, see [Regions and Availability Domains]. To get a list of availability domains, use the `ListAvailabilityDomains` operation in the Identity and Access Management Service API.
You may optionally specify a *display name* for the volume, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume.""")
@cli_util.option('--source-details-id', required=True, help=u"""The OCID of the volume backup.""")
@cli_util.option('--backup-policy-id', help=u"""If provided, specifies the ID of the volume backup policy to assign to the newly created volume. If omitted, no policy will be assigned.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--kms-key-id', help=u"""The OCID of the KMS key to be used as the master encryption key for the volume.""")
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size of the volume in GBs.""")
@cli_util.option('--size-in-mbs', type=click.INT, help=u"""The size of the volume in MBs. The value must be a multiple of 1024. This field is deprecated. Use sizeInGBs instead.""")
@cli_util.option('--volume-backup-id', help=u"""The OCID of the volume backup from which the data should be restored on the newly created volume. This field is deprecated. Use the sourceDetails field instead to specify the backup for the volume.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'Volume'})
@cli_util.wrap_exceptions
def create_volume_volume_source_from_volume_backup_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_id, backup_policy_id, defined_tags, display_name, freeform_tags, kms_key_id, size_in_gbs, size_in_mbs, volume_backup_id):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['id'] = source_details_id
if backup_policy_id is not None:
details['backupPolicyId'] = backup_policy_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
if size_in_mbs is not None:
details['sizeInMBs'] = size_in_mbs
if volume_backup_id is not None:
details['volumeBackupId'] = volume_backup_id
details['sourceDetails']['type'] = 'volumeBackup'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume(
create_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume') and callable(getattr(client, 'get_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
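
# Example invocation (illustrative only; placeholder OCIDs, `oci bv volume` prefix assumed).
# Equivalent to the --source-details JSON example above, but with the backup OCID passed as a
# flat parameter:
#
#   oci bv volume create-volume-volume-source-from-volume-backup-details \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..exampleuniqueID \
#       --source-details-id ocid1.volumebackup.oc1..exampleuniqueID \
#       --size-in-gbs 200
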
@volume_backup_group.command(name=cli_util.override('create_volume_backup.command_name', 'create'), help=u"""Creates a new backup of the specified volume. For general information about volume backups, see [Overview of Block Volume Service Backups]
When the request is received, the backup object is in a REQUEST_RECEIVED state. When the data is imaged, it goes into a CREATING state. After the backup is fully uploaded to the cloud, it goes into an AVAILABLE state.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume that needs to be backed up.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume backup. Does not have to be unique and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--type', type=custom_types.CliCaseInsensitiveChoice(["FULL", "INCREMENTAL"]), help=u"""The type of backup to create. If omitted, defaults to INCREMENTAL.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeBackup'})
@cli_util.wrap_exceptions
def create_volume_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_id, defined_tags, display_name, freeform_tags, type):
kwargs = {}
details = {}
details['volumeId'] = volume_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if type is not None:
details['type'] = type
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_backup(
create_volume_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_backup') and callable(getattr(client, 'get_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
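
# Example invocation (illustrative only; placeholder OCID; the `oci bv backup` prefix assumes
# the usual registration of volume_backup_group). Creates a full backup and waits for it:
#
#   oci bv backup create \
#       --volume-id ocid1.volume.oc1..exampleuniqueID \
#       --type FULL \
#       --display-name 'nightly-full' \
#       --wait-for-state AVAILABLE
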
@volume_backup_policy_assignment_group.command(name=cli_util.override('create_volume_backup_policy_assignment.command_name', 'create'), help=u"""Assigns a policy to the specified asset, such as a volume. Note that a given asset can only have one policy assigned to it; if this method is called for an asset that previously has a different policy assigned, the prior assignment will be silently deleted.""")
@cli_util.option('--asset-id', required=True, help=u"""The OCID of the asset (e.g. a volume) to which to assign the policy.""")
@cli_util.option('--policy-id', required=True, help=u"""The OCID of the volume backup policy to assign to an asset.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeBackupPolicyAssignment'})
@cli_util.wrap_exceptions
def create_volume_backup_policy_assignment(ctx, from_json, asset_id, policy_id):
kwargs = {}
details = {}
details['assetId'] = asset_id
details['policyId'] = policy_id
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_backup_policy_assignment(
create_volume_backup_policy_assignment_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
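
# Example invocation (illustrative only; placeholder OCIDs; the
# `oci bv volume-backup-policy-assignment` prefix assumes the usual registration of
# volume_backup_policy_assignment_group). Note that the service silently replaces any prior
# assignment on the asset:
#
#   oci bv volume-backup-policy-assignment create \
#       --asset-id ocid1.volume.oc1..exampleuniqueID \
#       --policy-id ocid1.volumebackuppolicy.oc1..exampleuniqueID
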
@volume_group_group.command(name=cli_util.override('create_volume_group.command_name', 'create'), help=u"""Creates a new volume group in the specified compartment. A volume group is a collection of volumes and may be created from a list of volumes, cloning an existing volume group, or by restoring a volume group backup. A volume group can contain up to 64 volumes. You may optionally specify a *display name* for the volume group, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.
For more information, see [Volume Groups].""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume group.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume group.""")
@cli_util.option('--source-details', required=True, type=custom_types.CLI_COMPLEX_TYPE, help=u"""Specifies the volume group source details for a new volume group. The volume source is either a list of volume IDs in the same availability domain, another volume group, or a volume group backup.""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group. Does not have to be unique, and it's changeable.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'VolumeGroupSourceDetails'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'source-details': {'module': 'core', 'class': 'VolumeGroupSourceDetails'}}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def create_volume_group(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details, defined_tags, display_name, freeform_tags):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = cli_util.parse_json_parameter("source_details", source_details)
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_group(
create_volume_group_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
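
# Example invocation (illustrative only; placeholder OCIDs; `oci bv volume-group` prefix
# assumed). --source-details takes the VolumeGroupSourceDetails JSON; "volumeIds" is the
# discriminator used when building the group from a list of volumes, matching the 'type'
# value set by the specialized commands below:
#
#   oci bv volume-group create \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..exampleuniqueID \
#       --source-details '{"type": "volumeIds", "volumeIds": ["ocid1.volume.oc1..exampleA", "ocid1.volume.oc1..exampleB"]}'
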
@volume_group_group.command(name=cli_util.override('create_volume_group_volume_group_source_from_volume_group_details.command_name', 'create-volume-group-volume-group-source-from-volume-group-details'), help=u"""Creates a new volume group in the specified compartment. A volume group is a collection of volumes and may be created from a list of volumes, cloning an existing volume group, or by restoring a volume group backup. A volume group can contain up to 64 volumes. You may optionally specify a *display name* for the volume group, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.
For more information, see [Volume Groups].""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume group.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume group.""")
@cli_util.option('--source-details-volume-group-id', required=True, help=u"""The OCID of the volume group to clone from.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group. Does not have to be unique, and it's changeable.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def create_volume_group_volume_group_source_from_volume_group_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_volume_group_id, defined_tags, display_name, freeform_tags):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['volumeGroupId'] = source_details_volume_group_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
details['sourceDetails']['type'] = 'volumeGroupId'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_group(
create_volume_group_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
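
# Example invocation (illustrative only; placeholder OCIDs; `oci bv volume-group` prefix
# assumed). Clones an existing volume group without hand-writing the sourceDetails JSON:
#
#   oci bv volume-group create-volume-group-volume-group-source-from-volume-group-details \
#       --availability-domain 'Uocm:PHX-AD-1' \
#       --compartment-id ocid1.compartment.oc1..exampleuniqueID \
#       --source-details-volume-group-id ocid1.volumegroup.oc1..exampleuniqueID
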
@volume_group_group.command(name=cli_util.override('create_volume_group_volume_group_source_from_volumes_details.command_name', 'create-volume-group-volume-group-source-from-volumes-details'), help=u"""Creates a new volume group in the specified compartment. A volume group is a collection of volumes and may be created from a list of volumes, cloning an existing volume group, or by restoring a volume group backup. A volume group can contain up to 64 volumes. You may optionally specify a *display name* for the volume group, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.
For more information, see [Volume Groups].""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume group.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume group.""")
@cli_util.option('--source-details-volume-ids', required=True, type=custom_types.CLI_COMPLEX_TYPE, help=u"""OCIDs for the volumes in this volume group.""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group. Does not have to be unique, and it's changeable.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'volume-ids': {'module': 'core', 'class': 'list[string]'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'volume-ids': {'module': 'core', 'class': 'list[string]'}}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def create_volume_group_volume_group_source_from_volumes_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_volume_ids, defined_tags, display_name, freeform_tags):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['volumeIds'] = cli_util.parse_json_parameter("source_details_volume_ids", source_details_volume_ids)
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
details['sourceDetails']['type'] = 'volumeIds'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_group(
create_volume_group_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
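# Illustrative usage (not part of the generated code): --source-details-volume-ids is a complex
# parameter and expects a JSON array of volume OCIDs. Assuming the command group is mounted under
# the usual `oci bv volume-group` alias, an invocation might look like this (OCIDs are placeholders):
#
#   oci bv volume-group create-volume-group-volume-group-source-from-volumes-details \
#       --availability-domain "Uocm:PHX-AD-1" \
#       --compartment-id "ocid1.compartment.oc1..example" \
#       --source-details-volume-ids '["ocid1.volume.oc1.phx.example1","ocid1.volume.oc1.phx.example2"]' \
#       --wait-for-state AVAILABLE --max-wait-seconds 600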
@volume_group_group.command(name=cli_util.override('create_volume_group_volume_group_source_from_volume_group_backup_details.command_name', 'create-volume-group-volume-group-source-from-volume-group-backup-details'), help=u"""Creates a new volume group in the specified compartment. A volume group is a collection of volumes and may be created from a list of volumes, by cloning an existing volume group, or by restoring a volume group backup. A volume group can contain up to 64 volumes. You may optionally specify a *display name* for the volume group, which is simply a friendly name or description. It does not have to be unique, and you can change it. Avoid entering confidential information.
For more information, see [Volume Groups].""")
@cli_util.option('--availability-domain', required=True, help=u"""The availability domain of the volume group.""")
@cli_util.option('--compartment-id', required=True, help=u"""The OCID of the compartment that contains the volume group.""")
@cli_util.option('--source-details-volume-group-backup-id', required=True, help=u"""The OCID of the volume group backup to restore from.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group. Does not have to be unique, and it's changeable.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def create_volume_group_volume_group_source_from_volume_group_backup_details(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, availability_domain, compartment_id, source_details_volume_group_backup_id, defined_tags, display_name, freeform_tags):
kwargs = {}
details = {}
details['availabilityDomain'] = availability_domain
details['compartmentId'] = compartment_id
details['sourceDetails'] = {}
details['sourceDetails']['volumeGroupBackupId'] = source_details_volume_group_backup_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
details['sourceDetails']['type'] = 'volumeGroupBackupId'
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_group(
create_volume_group_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
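# Illustrative usage (assumption, not generated): restoring a volume group from a backup only needs
# the backup OCID as a plain string; no JSON is required (placeholder OCIDs, `oci bv volume-group` alias assumed):
#
#   oci bv volume-group create-volume-group-volume-group-source-from-volume-group-backup-details \
#       --availability-domain "Uocm:PHX-AD-1" \
#       --compartment-id "ocid1.compartment.oc1..example" \
#       --source-details-volume-group-backup-id "ocid1.volumegroupbackup.oc1.phx.example"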
@volume_group_backup_group.command(name=cli_util.override('create_volume_group_backup.command_name', 'create'), help=u"""Creates a new backup of the specified volume group. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-id', required=True, help=u"""The OCID of the volume group that needs to be backed up.""")
@cli_util.option('--compartment-id', help=u"""The OCID of the compartment that will contain the volume group backup. This parameter is optional; by default, the backup will be created in the same compartment as the source volume group.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group backup. Does not have to be unique and it's changeable.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--type', type=custom_types.CliCaseInsensitiveChoice(["FULL", "INCREMENTAL"]), help=u"""The type of backup to create. If omitted, defaults to incremental.""")
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "COMMITTED", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeGroupBackup'})
@cli_util.wrap_exceptions
def create_volume_group_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_group_id, compartment_id, defined_tags, display_name, freeform_tags, type):
kwargs = {}
details = {}
details['volumeGroupId'] = volume_group_id
if compartment_id is not None:
details['compartmentId'] = compartment_id
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if type is not None:
details['type'] = type
client = cli_util.build_client('blockstorage', ctx)
result = client.create_volume_group_backup(
create_volume_group_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group_backup') and callable(getattr(client, 'get_volume_group_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
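# Illustrative usage (assumption, not generated): --type is optional and defaults to an incremental
# backup; pass FULL for a complete backup. Assuming the `oci bv volume-group-backup` alias (placeholder OCID):
#
#   oci bv volume-group-backup create \
#       --volume-group-id "ocid1.volumegroup.oc1.phx.example" \
#       --type FULL --wait-for-state AVAILABLE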
@boot_volume_group.command(name=cli_util.override('delete_boot_volume.command_name', 'delete'), help=u"""Deletes the specified boot volume. The volume cannot have an active connection to an instance. To disconnect the boot volume from a connected instance, see [Disconnecting From a Boot Volume]. **Warning:** All data on the boot volume will be permanently lost when the boot volume is deleted.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_boot_volume(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, boot_volume_id, if_match):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_boot_volume(
boot_volume_id=boot_volume_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume') and callable(getattr(client, 'get_boot_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_boot_volume(boot_volume_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
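# Illustrative usage (assumption, not generated): combining delete with --wait-for-state TERMINATED
# blocks until the boot volume is gone. Per the logic above, a 404 from the initial GET is treated as
# success, a timeout exits with code 2, and any other error exits with code 1. The confirm_delete_option
# decorator normally adds a --force flag to skip the confirmation prompt (placeholder OCID):
#
#   oci bv boot-volume delete \
#       --boot-volume-id "ocid1.bootvolume.oc1.phx.example" \
#       --force --wait-for-state TERMINATED --max-wait-seconds 900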
@boot_volume_backup_group.command(name=cli_util.override('delete_boot_volume_backup.command_name', 'delete'), help=u"""Deletes a boot volume backup.""")
@cli_util.option('--boot-volume-backup-id', required=True, help=u"""The OCID of the boot volume backup.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_boot_volume_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, boot_volume_backup_id, if_match):
if isinstance(boot_volume_backup_id, six.string_types) and len(boot_volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-backup-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_boot_volume_backup(
boot_volume_backup_id=boot_volume_backup_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume_backup') and callable(getattr(client, 'get_boot_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_boot_volume_backup(boot_volume_backup_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@boot_volume_kms_key_group.command(name=cli_util.override('delete_boot_volume_kms_key.command_name', 'delete'), help=u"""Removes the KMS key for the specified boot volume.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_boot_volume_kms_key(ctx, from_json, boot_volume_id, if_match):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_boot_volume_kms_key(
boot_volume_id=boot_volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
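# Illustrative usage (assumption, not generated): removing the customer-managed KMS key presumably
# reverts the boot volume to Oracle-managed encryption; --if-match can guard the call with the etag
# from a prior GET. Command path assumes the boot-volume-kms-key group alias (placeholder OCID):
#
#   oci bv boot-volume-kms-key delete \
#       --boot-volume-id "ocid1.bootvolume.oc1.phx.example" --force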
@volume_group.command(name=cli_util.override('delete_volume.command_name', 'delete'), help=u"""Deletes the specified volume. The volume cannot have an active connection to an instance. To disconnect the volume from a connected instance, see [Disconnecting From a Volume]. **Warning:** All data on the volume will be permanently lost when the volume is deleted.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_id, if_match):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume(
volume_id=volume_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume') and callable(getattr(client, 'get_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_volume(volume_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_backup_group.command(name=cli_util.override('delete_volume_backup.command_name', 'delete'), help=u"""Deletes a volume backup.""")
@cli_util.option('--volume-backup-id', required=True, help=u"""The OCID of the volume backup.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_backup_id, if_match):
if isinstance(volume_backup_id, six.string_types) and len(volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-backup-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume_backup(
volume_backup_id=volume_backup_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_backup') and callable(getattr(client, 'get_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_volume_backup(volume_backup_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_backup_policy_assignment_group.command(name=cli_util.override('delete_volume_backup_policy_assignment.command_name', 'delete'), help=u"""Deletes a volume backup policy assignment (i.e. unassigns the policy from an asset).""")
@cli_util.option('--policy-assignment-id', required=True, help=u"""The OCID of the volume backup policy assignment.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume_backup_policy_assignment(ctx, from_json, policy_assignment_id, if_match):
if isinstance(policy_assignment_id, six.string_types) and len(policy_assignment_id.strip()) == 0:
raise click.UsageError('Parameter --policy-assignment-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume_backup_policy_assignment(
policy_assignment_id=policy_assignment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_group.command(name=cli_util.override('delete_volume_group.command_name', 'delete'), help=u"""Deletes the specified volume group. Individual volumes are not deleted, only the volume group is deleted. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume_group(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_group_id, if_match):
if isinstance(volume_group_id, six.string_types) and len(volume_group_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume_group(
volume_group_id=volume_group_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_volume_group(volume_group_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_group_backup_group.command(name=cli_util.override('delete_volume_group_backup.command_name', 'delete'), help=u"""Deletes a volume group backup. This operation deletes all the backups in the volume group. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-backup-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group backup.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "COMMITTED", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume_group_backup(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_group_backup_id, if_match):
if isinstance(volume_group_backup_id, six.string_types) and len(volume_group_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-backup-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume_group_backup(
volume_group_backup_id=volume_group_backup_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group_backup') and callable(getattr(client, 'get_volume_group_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_volume_group_backup(volume_group_backup_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_kms_key_group.command(name=cli_util.override('delete_volume_kms_key.command_name', 'delete'), help=u"""Removes the KMS key for the specified volume.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_volume_kms_key(ctx, from_json, volume_id, if_match):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.delete_volume_kms_key(
volume_id=volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
@boot_volume_group.command(name=cli_util.override('get_boot_volume.command_name', 'get'), help=u"""Gets information for the specified boot volume.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'BootVolume'})
@cli_util.wrap_exceptions
def get_boot_volume(ctx, from_json, boot_volume_id):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_boot_volume(
boot_volume_id=boot_volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
@boot_volume_backup_group.command(name=cli_util.override('get_boot_volume_backup.command_name', 'get'), help=u"""Gets information for the specified boot volume backup.""")
@cli_util.option('--boot-volume-backup-id', required=True, help=u"""The OCID of the boot volume backup.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'BootVolumeBackup'})
@cli_util.wrap_exceptions
def get_boot_volume_backup(ctx, from_json, boot_volume_backup_id):
if isinstance(boot_volume_backup_id, six.string_types) and len(boot_volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-backup-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_boot_volume_backup(
boot_volume_backup_id=boot_volume_backup_id,
**kwargs
)
cli_util.render_response(result, ctx)
@boot_volume_kms_key_group.command(name=cli_util.override('get_boot_volume_kms_key.command_name', 'get'), help=u"""Gets the KMS key ID for the specified boot volume.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'BootVolumeKmsKey'})
@cli_util.wrap_exceptions
def get_boot_volume_kms_key(ctx, from_json, boot_volume_id, if_match):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.get_boot_volume_kms_key(
boot_volume_id=boot_volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group.command(name=cli_util.override('get_volume.command_name', 'get'), help=u"""Gets information for the specified volume.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'Volume'})
@cli_util.wrap_exceptions
def get_volume(ctx, from_json, volume_id):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume(
volume_id=volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_group.command(name=cli_util.override('get_volume_backup.command_name', 'get'), help=u"""Gets information for the specified volume backup.""")
@cli_util.option('--volume-backup-id', required=True, help=u"""The OCID of the volume backup.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeBackup'})
@cli_util.wrap_exceptions
def get_volume_backup(ctx, from_json, volume_backup_id):
if isinstance(volume_backup_id, six.string_types) and len(volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-backup-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_backup(
volume_backup_id=volume_backup_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_policy_group.command(name=cli_util.override('get_volume_backup_policy.command_name', 'get'), help=u"""Gets information for the specified volume backup policy.""")
@cli_util.option('--policy-id', required=True, help=u"""The OCID of the volume backup policy.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeBackupPolicy'})
@cli_util.wrap_exceptions
def get_volume_backup_policy(ctx, from_json, policy_id):
if isinstance(policy_id, six.string_types) and len(policy_id.strip()) == 0:
raise click.UsageError('Parameter --policy-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_backup_policy(
policy_id=policy_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_policy_assignment_group.command(name=cli_util.override('get_volume_backup_policy_asset_assignment.command_name', 'get-volume-backup-policy-asset-assignment'), help=u"""Gets the volume backup policy assignment for the specified asset. Note that the assetId query parameter is required, and that the returned list will contain at most one item (since any given asset can only have one policy assigned to it).""")
@cli_util.option('--asset-id', required=True, help=u"""The OCID of an asset (e.g. a volume).""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[VolumeBackupPolicyAssignment]'})
@cli_util.wrap_exceptions
def get_volume_backup_policy_asset_assignment(ctx, from_json, asset_id, limit, page):
kwargs = {}
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_backup_policy_asset_assignment(
asset_id=asset_id,
**kwargs
)
cli_util.render_response(result, ctx)
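# Illustrative usage (assumption, not generated): the asset assignment lookup is keyed by the asset
# (for example a volume) OCID rather than the policy OCID, and the returned list holds at most one
# assignment. Command path assumes the volume-backup-policy-assignment group alias (placeholder OCID):
#
#   oci bv volume-backup-policy-assignment get-volume-backup-policy-asset-assignment \
#       --asset-id "ocid1.volume.oc1.phx.example"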
@volume_backup_policy_assignment_group.command(name=cli_util.override('get_volume_backup_policy_assignment.command_name', 'get'), help=u"""Gets information for the specified volume backup policy assignment.""")
@cli_util.option('--policy-assignment-id', required=True, help=u"""The OCID of the volume backup policy assignment.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeBackupPolicyAssignment'})
@cli_util.wrap_exceptions
def get_volume_backup_policy_assignment(ctx, from_json, policy_assignment_id):
if isinstance(policy_assignment_id, six.string_types) and len(policy_assignment_id.strip()) == 0:
raise click.UsageError('Parameter --policy-assignment-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_backup_policy_assignment(
policy_assignment_id=policy_assignment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_group.command(name=cli_util.override('get_volume_group.command_name', 'get'), help=u"""Gets information for the specified volume group. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def get_volume_group(ctx, from_json, volume_group_id):
if isinstance(volume_group_id, six.string_types) and len(volume_group_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_group(
volume_group_id=volume_group_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_backup_group.command(name=cli_util.override('get_volume_group_backup.command_name', 'get'), help=u"""Gets information for the specified volume group backup. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-backup-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group backup.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeGroupBackup'})
@cli_util.wrap_exceptions
def get_volume_group_backup(ctx, from_json, volume_group_backup_id):
if isinstance(volume_group_backup_id, six.string_types) and len(volume_group_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-backup-id cannot be whitespace or empty string')
kwargs = {}
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_group_backup(
volume_group_backup_id=volume_group_backup_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_kms_key_group.command(name=cli_util.override('get_volume_kms_key.command_name', 'get'), help=u"""Gets the KMS key ID for the specified volume.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeKmsKey'})
@cli_util.wrap_exceptions
def get_volume_kms_key(ctx, from_json, volume_id, if_match):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
client = cli_util.build_client('blockstorage', ctx)
result = client.get_volume_kms_key(
volume_id=volume_id,
**kwargs
)
cli_util.render_response(result, ctx)
@boot_volume_backup_group.command(name=cli_util.override('list_boot_volume_backups.command_name', 'list'), help=u"""Lists the boot volume backups in the specified compartment. You can filter the results by boot volume.""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--boot-volume-id', help=u"""The OCID of the boot volume.""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--display-name', help=u"""A filter to return only resources that match the given display name exactly.""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIMECREATED", "DISPLAYNAME"]), help=u"""The field to sort by. You can provide one sort order (`sortOrder`). Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME sort order is case sensitive.
**Note:** In general, some \"List\" operations (for example, `ListInstances`) let you optionally filter by availability domain if the scope of the resource type is within a single availability domain. If you call one of these \"List\" operations without specifying an availability domain, the resources are grouped by availability domain, then sorted.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order is case sensitive.""")
@cli_util.option('--lifecycle-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help=u"""A filter to only return resources that match the given lifecycle state. The state value is case-insensitive.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[BootVolumeBackup]'})
@cli_util.wrap_exceptions
def list_boot_volume_backups(ctx, from_json, all_pages, page_size, compartment_id, boot_volume_id, limit, page, display_name, sort_by, sort_order, lifecycle_state):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if boot_volume_id is not None:
kwargs['boot_volume_id'] = boot_volume_id
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if display_name is not None:
kwargs['display_name'] = display_name
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
if lifecycle_state is not None:
kwargs['lifecycle_state'] = lifecycle_state
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_boot_volume_backups,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_boot_volume_backups,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_boot_volume_backups(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
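# Every "list" command in this module shares the three-way pagination branch
# seen above: --all walks every page via cli_util.list_call_get_all_results,
# a bare --limit caps the total via cli_util.list_call_get_up_to_limit, and
# otherwise a single page is requested. The function below is a minimal,
# illustrative sketch of the "--all" loop only, NOT the real cli_util helper
# (which also merges response headers and handles the --page-size/--limit
# interplay). It assumes `list_call` returns an object exposing `.data` (the
# page of items) and `.next_page` (the opc-next-page token, or a falsy value
# once the listing is exhausted).
def _example_fetch_all_pages(list_call, **kwargs):
    items = []
    next_page = None
    while True:
        if next_page:
            kwargs['page'] = next_page
        response = list_call(**kwargs)
        items.extend(response.data)
        next_page = getattr(response, 'next_page', None)
        if not next_page:
            break
    return items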
@boot_volume_group.command(name=cli_util.override('list_boot_volumes.command_name', 'list'), help=u"""Lists the boot volumes in the specified compartment and availability domain.""")
@cli_util.option('--availability-domain', required=True, help=u"""The name of the availability domain.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--volume-group-id', help=u"""The OCID of the volume group.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[BootVolume]'})
@cli_util.wrap_exceptions
def list_boot_volumes(ctx, from_json, all_pages, page_size, availability_domain, compartment_id, limit, page, volume_group_id):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if volume_group_id is not None:
kwargs['volume_group_id'] = volume_group_id
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_boot_volumes,
availability_domain=availability_domain,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_boot_volumes,
limit,
page_size,
availability_domain=availability_domain,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_boot_volumes(
availability_domain=availability_domain,
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_backup_policy_group.command(name=cli_util.override('list_volume_backup_policies.command_name', 'list'), help=u"""Lists all volume backup policies available to the caller.""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[VolumeBackupPolicy]'})
@cli_util.wrap_exceptions
def list_volume_backup_policies(ctx, from_json, all_pages, page_size, limit, page):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_volume_backup_policies,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_volume_backup_policies,
limit,
page_size,
**kwargs
)
else:
result = client.list_volume_backup_policies(
**kwargs
)
cli_util.render_response(result, ctx)
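# Illustrative invocation for list_volume_backup_policies above; this listing
# takes no --compartment-id because it returns the backup policies available
# to the caller. The command path is subject to CLI overrides:
#
#   oci bv volume-backup-policy list --all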
@volume_backup_group.command(name=cli_util.override('list_volume_backups.command_name', 'list'), help=u"""Lists the volume backups in the specified compartment. You can filter the results by volume.""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--volume-id', help=u"""The OCID of the volume.""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--display-name', help=u"""A filter to return only resources that match the given display name exactly.""")
@cli_util.option('--source-volume-backup-id', help=u"""A filter to return only resources that originated from the given source volume backup.""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIMECREATED", "DISPLAYNAME"]), help=u"""The field to sort by. You can provide one sort order (`sortOrder`). Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME sort order is case sensitive.
**Note:** In general, some \"List\" operations (for example, `ListInstances`) let you optionally filter by availability domain if the scope of the resource type is within a single availability domain. If you call one of these \"List\" operations without specifying an availability domain, the resources are grouped by availability domain, then sorted.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order is case sensitive.""")
@cli_util.option('--lifecycle-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help=u"""A filter to only return resources that match the given lifecycle state. The state value is case-insensitive.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[VolumeBackup]'})
@cli_util.wrap_exceptions
def list_volume_backups(ctx, from_json, all_pages, page_size, compartment_id, volume_id, limit, page, display_name, source_volume_backup_id, sort_by, sort_order, lifecycle_state):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if volume_id is not None:
kwargs['volume_id'] = volume_id
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if display_name is not None:
kwargs['display_name'] = display_name
if source_volume_backup_id is not None:
kwargs['source_volume_backup_id'] = source_volume_backup_id
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
if lifecycle_state is not None:
kwargs['lifecycle_state'] = lifecycle_state
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_volume_backups,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_volume_backups,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_volume_backups(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_backup_group.command(name=cli_util.override('list_volume_group_backups.command_name', 'list'), help=u"""Lists the volume group backups in the specified compartment. You can filter the results by volume group. For more information, see [Volume Groups].""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--volume-group-id', help=u"""The OCID of the volume group.""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--display-name', help=u"""A filter to return only resources that match the given display name exactly.""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIMECREATED", "DISPLAYNAME"]), help=u"""The field to sort by. You can provide one sort order (`sortOrder`). Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME sort order is case sensitive.
**Note:** In general, some \"List\" operations (for example, `ListInstances`) let you optionally filter by availability domain if the scope of the resource type is within a single availability domain. If you call one of these \"List\" operations without specifying an availability domain, the resources are grouped by availability domain, then sorted.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order is case sensitive.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[VolumeGroupBackup]'})
@cli_util.wrap_exceptions
def list_volume_group_backups(ctx, from_json, all_pages, page_size, compartment_id, volume_group_id, limit, page, display_name, sort_by, sort_order):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if volume_group_id is not None:
kwargs['volume_group_id'] = volume_group_id
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if display_name is not None:
kwargs['display_name'] = display_name
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_volume_group_backups,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_volume_group_backups,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_volume_group_backups(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group_group.command(name=cli_util.override('list_volume_groups.command_name', 'list'), help=u"""Lists the volume groups in the specified compartment and availability domain. For more information, see [Volume Groups].""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--availability-domain', help=u"""The name of the availability domain.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--display-name', help=u"""A filter to return only resources that match the given display name exactly.""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIMECREATED", "DISPLAYNAME"]), help=u"""The field to sort by. You can provide one sort order (`sortOrder`). Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME sort order is case sensitive.
**Note:** In general, some \"List\" operations (for example, `ListInstances`) let you optionally filter by availability domain if the scope of the resource type is within a single availability domain. If you call one of these \"List\" operations without specifying an availability domain, the resources are grouped by availability domain, then sorted.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order is case sensitive.""")
@cli_util.option('--lifecycle-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help=u"""A filter to only return resources that match the given lifecycle state. The state value is case-insensitive.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[VolumeGroup]'})
@cli_util.wrap_exceptions
def list_volume_groups(ctx, from_json, all_pages, page_size, compartment_id, availability_domain, limit, page, display_name, sort_by, sort_order, lifecycle_state):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
if sort_by and not availability_domain and not all_pages:
raise click.UsageError('You must provide an --availability-domain when doing a --sort-by, unless you specify the --all parameter')
kwargs = {}
if availability_domain is not None:
kwargs['availability_domain'] = availability_domain
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if display_name is not None:
kwargs['display_name'] = display_name
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
if lifecycle_state is not None:
kwargs['lifecycle_state'] = lifecycle_state
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_volume_groups,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_volume_groups,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_volume_groups(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@volume_group.command(name=cli_util.override('list_volumes.command_name', 'list'), help=u"""Lists the volumes in the specified compartment and availability domain.""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment.""")
@cli_util.option('--availability-domain', help=u"""The name of the availability domain.
Example: `Uocm:PHX-AD-1`""")
@cli_util.option('--limit', type=click.INT, help=u"""For list pagination. The maximum number of results per page, or items to return in a paginated \"List\" call. For important details about how pagination works, see [List Pagination].
Example: `50`""")
@cli_util.option('--page', help=u"""For list pagination. The value of the `opc-next-page` response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--display-name', help=u"""A filter to return only resources that match the given display name exactly.""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIMECREATED", "DISPLAYNAME"]), help=u"""The field to sort by. You can provide one sort order (`sortOrder`). Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME sort order is case sensitive.
**Note:** In general, some \"List\" operations (for example, `ListInstances`) let you optionally filter by availability domain if the scope of the resource type is within a single availability domain. If you call one of these \"List\" operations without specifying an availability domain, the resources are grouped by availability domain, then sorted.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order is case sensitive.""")
@cli_util.option('--volume-group-id', help=u"""The OCID of the volume group.""")
@cli_util.option('--lifecycle-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help=u"""A filter to only return resources that match the given lifecycle state. The state value is case-insensitive.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'list[Volume]'})
@cli_util.wrap_exceptions
def list_volumes(ctx, from_json, all_pages, page_size, compartment_id, availability_domain, limit, page, display_name, sort_by, sort_order, volume_group_id, lifecycle_state):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
if sort_by and not availability_domain and not all_pages:
raise click.UsageError('You must provide an --availability-domain when doing a --sort-by, unless you specify the --all parameter')
kwargs = {}
if availability_domain is not None:
kwargs['availability_domain'] = availability_domain
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if display_name is not None:
kwargs['display_name'] = display_name
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
if volume_group_id is not None:
kwargs['volume_group_id'] = volume_group_id
if lifecycle_state is not None:
kwargs['lifecycle_state'] = lifecycle_state
client = cli_util.build_client('blockstorage', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_volumes,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_volumes,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_volumes(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
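# Note on the --sort-by guard in list_volumes and list_volume_groups above:
# per the --sort-by help text, results spanning availability domains are
# grouped by availability domain before being sorted, so the generated check
# requires an --availability-domain for a sorted listing unless --all is used
# to fetch every page. Illustrative invocation (command path, availability
# domain name, and OCID are placeholders):
#
#   oci bv volume list --compartment-id ocid1.compartment.oc1..example \
#       --availability-domain Uocm:PHX-AD-1 \
#       --sort-by TIMECREATED --sort-order DESC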
@boot_volume_group.command(name=cli_util.override('update_boot_volume.command_name', 'update'), help=u"""Updates the specified boot volume's display name, defined tags, and free-form tags.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size, in GBs, to resize the boot volume to. Must be larger than the current size.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'BootVolume'})
@cli_util.wrap_exceptions
def update_boot_volume(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, boot_volume_id, defined_tags, display_name, freeform_tags, size_in_gbs, if_match):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
client = cli_util.build_client('blockstorage', ctx)
result = client.update_boot_volume(
boot_volume_id=boot_volume_id,
update_boot_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume') and callable(getattr(client, 'get_boot_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
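# The update commands in this module share the --wait-for-state handling seen
# above: perform the mutation, then poll the matching get_* client method via
# oci.wait_until until lifecycle_state reaches the requested value, printing
# the last known state and exiting with code 2 if the wait times out. The
# function below is a minimal, illustrative polling loop, NOT the real
# oci.wait_until implementation; `get_resource` is a hypothetical callable
# returning an object with a `lifecycle_state` attribute.
def _example_wait_for_state(get_resource, target_state, max_wait_seconds=1200, interval_seconds=30):
    import time  # local import keeps this sketch self-contained

    deadline = time.time() + max_wait_seconds
    while True:
        resource = get_resource()
        if resource.lifecycle_state == target_state:
            return resource
        if time.time() >= deadline:
            raise RuntimeError('Timed out waiting for state {}'.format(target_state))
        time.sleep(interval_seconds)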
@boot_volume_backup_group.command(name=cli_util.override('update_boot_volume_backup.command_name', 'update'), help=u"""Updates the display name for the specified boot volume backup. Avoid entering confidential information.""")
@cli_util.option('--boot-volume-backup-id', required=True, help=u"""The OCID of the boot volume backup.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A friendly user-specified name for the boot volume backup. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'BootVolumeBackup'})
@cli_util.wrap_exceptions
def update_boot_volume_backup(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, boot_volume_backup_id, defined_tags, display_name, freeform_tags, if_match):
if isinstance(boot_volume_backup_id, six.string_types) and len(boot_volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-backup-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
client = cli_util.build_client('blockstorage', ctx)
result = client.update_boot_volume_backup(
boot_volume_backup_id=boot_volume_backup_id,
update_boot_volume_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_boot_volume_backup') and callable(getattr(client, 'get_boot_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_boot_volume_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@boot_volume_kms_key_group.command(name=cli_util.override('update_boot_volume_kms_key.command_name', 'update'), help=u"""Updates the KMS key ID for the specified boot volume.""")
@cli_util.option('--boot-volume-id', required=True, help=u"""The OCID of the boot volume.""")
@cli_util.option('--kms-key-id', help=u"""The OCID of the new KMS key which will be used to protect the specified boot volume. This key must be a valid KMS key OCID, and the user must have a key delegation policy that allows them to access this key. Even if the new KMS key is the same as the previous KMS key ID, the Block Volume service will use it to regenerate a new volume encryption key.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'BootVolumeKmsKey'})
@cli_util.wrap_exceptions
def update_boot_volume_kms_key(ctx, from_json, boot_volume_id, kms_key_id, if_match):
if isinstance(boot_volume_id, six.string_types) and len(boot_volume_id.strip()) == 0:
raise click.UsageError('Parameter --boot-volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
client = cli_util.build_client('blockstorage', ctx)
result = client.update_boot_volume_kms_key(
boot_volume_id=boot_volume_id,
update_boot_volume_kms_key_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
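# Illustrative key-rotation invocation for update_boot_volume_kms_key above.
# Per the --kms-key-id help text, supplying the current key OCID again still
# causes the Block Volume service to regenerate the volume encryption key.
# Command path and OCIDs are placeholders:
#
#   oci bv boot-volume-kms-key update \
#       --boot-volume-id ocid1.bootvolume.oc1..example \
#       --kms-key-id ocid1.key.oc1..example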
@volume_group.command(name=cli_util.override('update_volume.command_name', 'update'), help=u"""Updates the specified volume's display name. Avoid entering confidential information.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name. Does not have to be unique, and it's changeable. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--size-in-gbs', type=click.INT, help=u"""The size, in GBs, to resize the volume to. Must be larger than the current size.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "RESTORING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'Volume'})
@cli_util.wrap_exceptions
def update_volume(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_id, defined_tags, display_name, freeform_tags, size_in_gbs, if_match):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if size_in_gbs is not None:
details['sizeInGBs'] = size_in_gbs
client = cli_util.build_client('blockstorage', ctx)
result = client.update_volume(
volume_id=volume_id,
update_volume_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume') and callable(getattr(client, 'get_volume')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
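# Illustrative tag update for update_volume above. --defined-tags and
# --freeform-tags take JSON documents (the examples come from the option help
# text); because the supplied values replace rather than merge with existing
# tags, the command prompts for confirmation unless --force is passed.
# Command path and OCID are placeholders:
#
#   oci bv volume update --volume-id ocid1.volume.oc1..example \
#       --freeform-tags '{"Department": "Finance"}' \
#       --defined-tags '{"Operations": {"CostCenter": "42"}}' \
#       --force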
@volume_backup_group.command(name=cli_util.override('update_volume_backup.command_name', 'update'), help=u"""Updates the display name for the specified volume backup. Avoid entering confidential information.""")
@cli_util.option('--volume-backup-id', required=True, help=u"""The OCID of the volume backup.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A friendly user-specified name for the volume backup. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeBackup'})
@cli_util.wrap_exceptions
def update_volume_backup(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_backup_id, defined_tags, display_name, freeform_tags, if_match):
if isinstance(volume_backup_id, six.string_types) and len(volume_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-backup-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
client = cli_util.build_client('blockstorage', ctx)
result = client.update_volume_backup(
volume_backup_id=volume_backup_id,
update_volume_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_backup') and callable(getattr(client, 'get_volume_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_group_group.command(name=cli_util.override('update_volume_group.command_name', 'update'), help=u"""Updates the set of volumes in a volume group along with the display name. Use this operation to add or remove volumes in a volume group. Specify the full list of volume IDs to include in the volume group. If a volume ID is not specified in the call, it is removed from the volume group. Avoid entering confidential information.
For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A user-friendly name for the volume group.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--volume-ids', type=custom_types.CLI_COMPLEX_TYPE, help=u"""OCIDs for the volumes in this volume group.""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["PROVISIONING", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'volume-ids': {'module': 'core', 'class': 'list[string]'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}, 'volume-ids': {'module': 'core', 'class': 'list[string]'}}, output_type={'module': 'core', 'class': 'VolumeGroup'})
@cli_util.wrap_exceptions
def update_volume_group(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_group_id, defined_tags, display_name, freeform_tags, volume_ids, if_match):
if isinstance(volume_group_id, six.string_types) and len(volume_group_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags or volume_ids:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags and volume-ids will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if volume_ids is not None:
details['volumeIds'] = cli_util.parse_json_parameter("volume_ids", volume_ids)
client = cli_util.build_client('blockstorage', ctx)
result = client.update_volume_group(
volume_group_id=volume_group_id,
update_volume_group_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group') and callable(getattr(client, 'get_volume_group')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
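# Illustrative invocation for update_volume_group above. Per the command help,
# --volume-ids is the full replacement list: any volume OCID omitted from it
# is removed from the group, which is why the confirmation prompt also covers
# volume-ids. Command path and OCIDs are placeholders:
#
#   oci bv volume-group update --volume-group-id ocid1.volumegroup.oc1..example \
#       --volume-ids '["ocid1.volume.oc1..example1", "ocid1.volume.oc1..example2"]' \
#       --force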
@volume_group_backup_group.command(name=cli_util.override('update_volume_group_backup.command_name', 'update'), help=u"""Updates the display name for the specified volume group backup. For more information, see [Volume Groups].""")
@cli_util.option('--volume-group-backup-id', required=True, help=u"""The Oracle Cloud ID (OCID) that uniquely identifies the volume group backup.""")
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--display-name', help=u"""A friendly user-specified name for the volume group backup.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "COMMITTED", "AVAILABLE", "TERMINATING", "TERMINATED", "FAULTY", "REQUEST_RECEIVED"]), help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'defined-tags': {'module': 'core', 'class': 'dict(str, dict(str, object))'}, 'freeform-tags': {'module': 'core', 'class': 'dict(str, string)'}}, output_type={'module': 'core', 'class': 'VolumeGroupBackup'})
@cli_util.wrap_exceptions
def update_volume_group_backup(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, volume_group_backup_id, defined_tags, display_name, freeform_tags, if_match):
if isinstance(volume_group_backup_id, six.string_types) and len(volume_group_backup_id.strip()) == 0:
raise click.UsageError('Parameter --volume-group-backup-id cannot be whitespace or empty string')
if not force:
if defined_tags or freeform_tags:
if not click.confirm("WARNING: Updates to defined-tags and freeform-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if defined_tags is not None:
details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
if display_name is not None:
details['displayName'] = display_name
if freeform_tags is not None:
details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
client = cli_util.build_client('blockstorage', ctx)
result = client.update_volume_group_backup(
volume_group_backup_id=volume_group_backup_id,
update_volume_group_backup_details=details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_volume_group_backup') and callable(getattr(client, 'get_volume_group_backup')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_volume_group_backup(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@volume_kms_key_group.command(name=cli_util.override('update_volume_kms_key.command_name', 'update'), help=u"""Updates the KMS key ID for the specified volume.""")
@cli_util.option('--volume-id', required=True, help=u"""The OCID of the volume.""")
@cli_util.option('--kms-key-id', help=u"""The OCID of the new KMS key which will be used to protect the specified volume. This key has to be a valid KMS key OCID, and the user must have key delegation policy to allow them to access this key. Even if the new KMS key is the same as the previous KMS key ID, the Block Volume service will use it to regenerate a new volume encryption key.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'core', 'class': 'VolumeKmsKey'})
@cli_util.wrap_exceptions
def update_volume_kms_key(ctx, from_json, volume_id, kms_key_id, if_match):
if isinstance(volume_id, six.string_types) and len(volume_id.strip()) == 0:
raise click.UsageError('Parameter --volume-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
details = {}
if kms_key_id is not None:
details['kmsKeyId'] = kms_key_id
client = cli_util.build_client('blockstorage', ctx)
result = client.update_volume_kms_key(
volume_id=volume_id,
update_volume_kms_key_details=details,
**kwargs
)
cli_util.render_response(result, ctx)
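# Illustrative sketch (not part of the generated CLI module above): the
# --wait-for-state options on these commands poll the resource until it reaches
# the requested lifecycle state, honouring --max-wait-seconds and
# --wait-interval-seconds and exiting with return code 2 on timeout. The helper
# below is a hypothetical, standard-library-only approximation of that
# behaviour; get_resource_state is an assumed callable, not an OCI SDK function.
import time


def wait_for_lifecycle_state(get_resource_state, target_state,
                             max_wait_seconds=1200, wait_interval_seconds=30):
    """Poll get_resource_state() until it returns target_state or the deadline passes."""
    deadline = time.monotonic() + max_wait_seconds
    while time.monotonic() < deadline:
        current = get_resource_state()
        if current == target_state:
            return current
        time.sleep(wait_interval_seconds)
    # Mirrors the documented CLI behaviour: a timeout maps to return code 2.
    raise SystemExit(2)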
| 71.542295
| 694
| 0.728071
| 31,179
| 218,204
| 4.902915
| 0.01857
| 0.03503
| 0.028489
| 0.014836
| 0.976503
| 0.9714
| 0.96183
| 0.954111
| 0.945731
| 0.931444
| 0
| 0.002853
| 0.164617
| 218,204
| 3,049
| 695
| 71.565759
| 0.835773
| 0.027566
| 0
| 0.842915
| 0
| 0.112146
| 0.457393
| 0.029008
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027126
| false
| 0.029555
| 0.010931
| 0
| 0.038057
| 0.000405
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92da891874db4b4e17a4134d71687df1ba569834
| 27,270
|
py
|
Python
|
stubs.min/System/__init___parts/Math.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/System/__init___parts/Math.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/__init___parts/Math.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
class Math(object):
""" Provides constants and static methods for trigonometric,logarithmic,and other common mathematical functions. """
@staticmethod
def Abs(value):
"""
Abs(value: Single) -> Single
Returns the absolute value of a single-precision floating-point number.
value: A number that is greater than or equal to System.Single.MinValue,but less than
or equal to System.Single.MaxValue.
Abs(value: float) -> float
Returns the absolute value of a double-precision floating-point number.
value: A number that is greater than or equal to System.Double.MinValue,but less than
or equal to System.Double.MaxValue.
Abs(value: Decimal) -> Decimal
Returns the absolute value of a System.Decimal number.
value: A number that is greater than or equal to System.Decimal.MinValue,but less
than or equal to System.Decimal.MaxValue.
Abs(value: Int64) -> Int64
Returns the absolute value of a 64-bit signed integer.
value: A number that is greater than System.Int64.MinValue,but less than or equal to
System.Int64.MaxValue.
Abs(value: SByte) -> SByte
Returns the absolute value of an 8-bit signed integer.
value: A number that is greater than System.SByte.MinValue,but less than or equal to
System.SByte.MaxValue.
Abs(value: Int16) -> Int16
Returns the absolute value of a 16-bit signed integer.
value: A number that is greater than System.Int16.MinValue,but less than or equal to
System.Int16.MaxValue.
Abs(value: int) -> int
Returns the absolute value of a 32-bit signed integer.
value: A number that is greater than System.Int32.MinValue,but less than or equal to
System.Int32.MaxValue.
"""
pass
@staticmethod
def Acos(d):
"""
Acos(d: float) -> float
Returns the angle whose cosine is the specified number.
d: A number representing a cosine,where d must be greater than or equal to -1,
but less than or equal to 1.
Returns: An angle measured in radians, or System.Double.NaN if d < -1 or d > 1 or d equals System.Double.NaN.
"""
pass
@staticmethod
def Asin(d):
"""
Asin(d: float) -> float
Returns the angle whose sine is the specified number.
d: A number representing a sine,where d must be greater than or equal to -1,but
less than or equal to 1.
Returns: An angle measured in radians, or System.Double.NaN if d < -1 or d > 1 or d equals System.Double.NaN.
"""
pass
@staticmethod
def Atan(d):
"""
Atan(d: float) -> float
Returns the angle whose tangent is the specified number.
d: A number representing a tangent.
Returns: An angle measured in radians, or System.Double.NaN if d equals System.Double.NaN,
-PI/2 rounded to double precision (-1.5707963267949) if d equals System.Double.NegativeInfinity,
or PI/2 rounded to double precision (1.5707963267949) if d equals System.Double.PositiveInfinity.
"""
pass
@staticmethod
def Atan2(y,x):
"""
Atan2(y: float,x: float) -> float
Returns the angle whose tangent is the quotient of two specified numbers.
y: The y coordinate of a point.
x: The x coordinate of a point.
Returns: An angle measured in radians such that tan(angle) = y / x, where (x,y) is a
point in the Cartesian plane. For points on the boundaries of the quadrants: if y is 0
and x is not negative the result is 0, if y is 0 and x is negative the result is PI,
if x is 0 and y is positive the result is PI/2, and if x is 0 and y is negative the
result is -PI/2. If x or y is System.Double.NaN, or if x and y are either
System.Double.PositiveInfinity or System.Double.NegativeInfinity, the method returns
System.Double.NaN.
"""
pass
@staticmethod
def BigMul(a,b):
"""
BigMul(a: int,b: int) -> Int64
Produces the full product of two 32-bit numbers.
a: The first number to multiply.
b: The second number to multiply.
Returns: The number containing the product of the specified numbers.
"""
pass
@staticmethod
def Ceiling(*__args):
"""
Ceiling(a: float) -> float
Returns the smallest integral value that is greater than or equal to the
specified double-precision floating-point number.
a: A double-precision floating-point number.
Returns: The smallest integral value that is greater than or equal to a. If a is equal
to System.Double.NaN,System.Double.NegativeInfinity,or
System.Double.PositiveInfinity,that value is returned. Note that this method
returns a System.Double instead of an integral type.
Ceiling(d: Decimal) -> Decimal
Returns the smallest integral value that is greater than or equal to the
specified decimal number.
d: A decimal number.
Returns: The smallest integral value that is greater than or equal to d. Note that this
method returns a System.Decimal instead of an integral type.
"""
pass
@staticmethod
def Cos(d):
"""
Cos(d: float) -> float
Returns the cosine of the specified angle.
d: An angle,measured in radians.
Returns: The cosine of d. If d is equal to System.Double.NaN,
System.Double.NegativeInfinity,or System.Double.PositiveInfinity,this method
returns System.Double.NaN.
"""
pass
@staticmethod
def Cosh(value):
"""
Cosh(value: float) -> float
Returns the hyperbolic cosine of the specified angle.
value: An angle,measured in radians.
Returns: The hyperbolic cosine of value. If value is equal to
System.Double.NegativeInfinity or System.Double.PositiveInfinity,
System.Double.PositiveInfinity is returned. If value is equal to
System.Double.NaN,System.Double.NaN is returned.
"""
pass
@staticmethod
def DivRem(a,b,result):
"""
DivRem(a: Int64,b: Int64) -> (Int64,Int64)
Calculates the quotient of two 64-bit signed integers and also returns the
remainder in an output parameter.
a: The dividend.
b: The divisor.
Returns: The quotient of the specified numbers.
DivRem(a: int,b: int) -> (int,int)
Calculates the quotient of two 32-bit signed integers and also returns the
remainder in an output parameter.
a: The dividend.
b: The divisor.
Returns: The quotient of the specified numbers.
"""
pass
@staticmethod
def Exp(d):
"""
Exp(d: float) -> float
Returns e raised to the specified power.
d: A number specifying a power.
Returns: The number e raised to the power d. If d equals System.Double.NaN or
System.Double.PositiveInfinity,that value is returned. If d equals
System.Double.NegativeInfinity,0 is returned.
"""
pass
@staticmethod
def Floor(d):
"""
Floor(d: float) -> float
Returns the largest integer less than or equal to the specified
double-precision floating-point number.
d: A double-precision floating-point number.
Returns: The largest integer less than or equal to d. If d is equal to
System.Double.NaN,System.Double.NegativeInfinity,or
System.Double.PositiveInfinity,that value is returned.
Floor(d: Decimal) -> Decimal
Returns the largest integer less than or equal to the specified decimal number.
d: A decimal number.
Returns: The largest integer less than or equal to d.
"""
pass
@staticmethod
def IEEERemainder(x,y):
"""
IEEERemainder(x: float,y: float) -> float
Returns the remainder resulting from the division of a specified number by
another specified number.
x: A dividend.
y: A divisor.
Returns: A number equal to x - (y Q),where Q is the quotient of x / y rounded to the
nearest integer (if x / y falls halfway between two integers,the even integer
is returned).If x - (y Q) is zero,the value +0 is returned if x is positive,
or -0 if x is negative.If y=0,System.Double.NaN is returned.
"""
pass
@staticmethod
def Log(*__args):
"""
Log(a: float,newBase: float) -> float
Returns the logarithm of a specified number in a specified base.
a: A number whose logarithm is to be found.
newBase: The base of the logarithm.
Returns: One of the values in the following table. (+Infinity denotes
System.Double.PositiveInfinity,-Infinity denotes
System.Double.NegativeInfinity,and NaN denotes
System.Double.NaN.)
a > 0 and ((0 < newBase < 1) or (newBase > 1)): the base newBase logarithm of a.
a < 0 (any newBase), newBase < 0 (any a), newBase = 1 (any a), a = NaN (any newBase),
newBase = NaN (any a), a != 1 and newBase = 0, or a != 1 and newBase = +Infinity: NaN.
a = 0 and 0 < newBase < 1: +Infinity. a = 0 and newBase > 1: -Infinity.
a = +Infinity and 0 < newBase < 1: -Infinity. a = +Infinity and newBase > 1: +Infinity.
a = 1 and newBase = 0: 0. a = 1 and newBase = +Infinity: 0.
Log(d: float) -> float
Returns the natural (base e) logarithm of a specified number.
d: A number whose logarithm is to be found.
Returns: One of the values in the following table.
d positive: the natural logarithm of d; that is, ln d or log e d.
d zero: System.Double.NegativeInfinity.
d negative: System.Double.NaN.
d equal to System.Double.NaN: System.Double.NaN.
d equal to System.Double.PositiveInfinity: System.Double.PositiveInfinity.
"""
pass
@staticmethod
def Log10(d):
"""
Log10(d: float) -> float
Returns the base 10 logarithm of a specified number.
d: A number whose logarithm is to be found.
Returns: One of the values in the following table.
d positive: the base 10 log of d; that is, log 10 d.
d zero: System.Double.NegativeInfinity.
d negative: System.Double.NaN.
d equal to System.Double.NaN: System.Double.NaN.
d equal to System.Double.PositiveInfinity: System.Double.PositiveInfinity.
"""
pass
@staticmethod
def Max(val1,val2):
"""
Max(val1: UInt64,val2: UInt64) -> UInt64
Returns the larger of two 64-bit unsigned integers.
val1: The first of two 64-bit unsigned integers to compare.
val2: The second of two 64-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: Int64,val2: Int64) -> Int64
Returns the larger of two 64-bit signed integers.
val1: The first of two 64-bit signed integers to compare.
val2: The second of two 64-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: Single,val2: Single) -> Single
Returns the larger of two single-precision floating-point numbers.
val1: The first of two single-precision floating-point numbers to compare.
val2: The second of two single-precision floating-point numbers to compare.
Returns: Parameter val1 or val2,whichever is larger. If val1,or val2,or both val1 and
val2 are equal to System.Single.NaN,System.Single.NaN is returned.
Max(val1: Decimal,val2: Decimal) -> Decimal
Returns the larger of two decimal numbers.
val1: The first of two decimal numbers to compare.
val2: The second of two decimal numbers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: float,val2: float) -> float
Returns the larger of two double-precision floating-point numbers.
val1: The first of two double-precision floating-point numbers to compare.
val2: The second of two double-precision floating-point numbers to compare.
Returns: Parameter val1 or val2,whichever is larger. If val1,val2,or both val1 and
val2 are equal to System.Double.NaN,System.Double.NaN is returned.
Max(val1: UInt32,val2: UInt32) -> UInt32
Returns the larger of two 32-bit unsigned integers.
val1: The first of two 32-bit unsigned integers to compare.
val2: The second of two 32-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: Byte,val2: Byte) -> Byte
Returns the larger of two 8-bit unsigned integers.
val1: The first of two 8-bit unsigned integers to compare.
val2: The second of two 8-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: SByte,val2: SByte) -> SByte
Returns the larger of two 8-bit signed integers.
val1: The first of two 8-bit signed integers to compare.
val2: The second of two 8-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: Int16,val2: Int16) -> Int16
Returns the larger of two 16-bit signed integers.
val1: The first of two 16-bit signed integers to compare.
val2: The second of two 16-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: int,val2: int) -> int
Returns the larger of two 32-bit signed integers.
val1: The first of two 32-bit signed integers to compare.
val2: The second of two 32-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
Max(val1: UInt16,val2: UInt16) -> UInt16
Returns the larger of two 16-bit unsigned integers.
val1: The first of two 16-bit unsigned integers to compare.
val2: The second of two 16-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is larger.
"""
pass
@staticmethod
def Min(val1,val2):
"""
Min(val1: UInt64,val2: UInt64) -> UInt64
Returns the smaller of two 64-bit unsigned integers.
val1: The first of two 64-bit unsigned integers to compare.
val2: The second of two 64-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: Int64,val2: Int64) -> Int64
Returns the smaller of two 64-bit signed integers.
val1: The first of two 64-bit signed integers to compare.
val2: The second of two 64-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: Single,val2: Single) -> Single
Returns the smaller of two single-precision floating-point numbers.
val1: The first of two single-precision floating-point numbers to compare.
val2: The second of two single-precision floating-point numbers to compare.
Returns: Parameter val1 or val2,whichever is smaller. If val1,val2,or both val1 and
val2 are equal to System.Single.NaN,System.Single.NaN is returned.
Min(val1: Decimal,val2: Decimal) -> Decimal
Returns the smaller of two decimal numbers.
val1: The first of two decimal numbers to compare.
val2: The second of two decimal numbers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: float,val2: float) -> float
Returns the smaller of two double-precision floating-point numbers.
val1: The first of two double-precision floating-point numbers to compare.
val2: The second of two double-precision floating-point numbers to compare.
Returns: Parameter val1 or val2,whichever is smaller. If val1,val2,or both val1 and
val2 are equal to System.Double.NaN,System.Double.NaN is returned.
Min(val1: UInt32,val2: UInt32) -> UInt32
Returns the smaller of two 32-bit unsigned integers.
val1: The first of two 32-bit unsigned integers to compare.
val2: The second of two 32-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: Byte,val2: Byte) -> Byte
Returns the smaller of two 8-bit unsigned integers.
val1: The first of two 8-bit unsigned integers to compare.
val2: The second of two 8-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: SByte,val2: SByte) -> SByte
Returns the smaller of two 8-bit signed integers.
val1: The first of two 8-bit signed integers to compare.
val2: The second of two 8-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: Int16,val2: Int16) -> Int16
Returns the smaller of two 16-bit signed integers.
val1: The first of two 16-bit signed integers to compare.
val2: The second of two 16-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: int,val2: int) -> int
Returns the smaller of two 32-bit signed integers.
val1: The first of two 32-bit signed integers to compare.
val2: The second of two 32-bit signed integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
Min(val1: UInt16,val2: UInt16) -> UInt16
Returns the smaller of two 16-bit unsigned integers.
val1: The first of two 16-bit unsigned integers to compare.
val2: The second of two 16-bit unsigned integers to compare.
Returns: Parameter val1 or val2,whichever is smaller.
"""
pass
@staticmethod
def Pow(x,y):
"""
Pow(x: float,y: float) -> float
Returns a specified number raised to the specified power.
x: A double-precision floating-point number to be raised to a power.
y: A double-precision floating-point number that specifies a power.
Returns: The number x raised to the power y.
"""
pass
@staticmethod
def Round(*__args):
"""
Round(d: Decimal,decimals: int) -> Decimal
Rounds a decimal value to a specified number of fractional digits.
d: A decimal number to be rounded.
decimals: The number of decimal places in the return value.
Returns: The number nearest to d that contains a number of fractional digits equal to
decimals.
Round(d: Decimal) -> Decimal
Rounds a decimal value to the nearest integral value.
d: A decimal number to be rounded.
Returns: The integer nearest parameter d. If the fractional component of d is halfway
between two integers,one of which is even and the other odd,the even number
is returned. Note that this method returns a System.Decimal instead of an
integral type.
Round(d: Decimal,decimals: int,mode: MidpointRounding) -> Decimal
Rounds a decimal value to a specified number of fractional digits. A parameter
specifies how to round the value if it is midway between two other numbers.
d: A decimal number to be rounded.
decimals: The number of decimal places in the return value.
mode: Specification for how to round d if it is midway between two other numbers.
Returns: The number nearest to d that contains a number of fractional digits equal to
decimals. If d has fewer fractional digits than decimals,d is returned
unchanged.
Round(d: Decimal,mode: MidpointRounding) -> Decimal
Rounds a decimal value to the nearest integer. A parameter specifies how to
round the value if it is midway between two other numbers.
d: A decimal number to be rounded.
mode: Specification for how to round d if it is midway between two other numbers.
Returns: The integer nearest d. If d is halfway between two numbers,one of which is
even and the other odd,then mode determines which of the two is returned.
Round(value: float,digits: int) -> float
Rounds a double-precision floating-point value to a specified number of
fractional digits.
value: A double-precision floating-point number to be rounded.
digits: The number of fractional digits in the return value.
Returns: The number nearest to value that contains a number of fractional digits equal
to digits.
Round(a: float) -> float
Rounds a double-precision floating-point value to the nearest integral value.
a: A double-precision floating-point number to be rounded.
Returns: The integer nearest a. If the fractional component of a is halfway between two
integers,one of which is even and the other odd,then the even number is
returned. Note that this method returns a System.Double instead of an integral
type.
Round(value: float,digits: int,mode: MidpointRounding) -> float
Rounds a double-precision floating-point value to the specified number of
fractional digits. A parameter specifies how to round the value if it is midway
between two other numbers.
value: A double-precision floating-point number to be rounded.
digits: The number of fractional digits in the return value.
mode: Specification for how to round value if it is midway between two other numbers.
Returns: The number nearest to value that has a number of fractional digits equal to
digits. If value has fewer fractional digits than digits,value is returned
unchanged.
Round(value: float,mode: MidpointRounding) -> float
Rounds a double-precision floating-point value to the nearest integer. A
parameter specifies how to round the value if it is midway between two other
numbers.
value: A double-precision floating-point number to be rounded.
mode: Specification for how to round value if it is midway between two other numbers.
Returns: The integer nearest value. If value is halfway between two integers,one of
which is even and the other odd,then mode determines which of the two is
returned.
"""
pass
@staticmethod
def Sign(value):
"""
Sign(value: Single) -> int
Returns a value indicating the sign of a single-precision floating-point number.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: float) -> int
Returns a value indicating the sign of a double-precision floating-point number.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: Decimal) -> int
Returns a value indicating the sign of a decimal number.
value: A signed decimal number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: Int64) -> int
Returns a value indicating the sign of a 64-bit signed integer.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: SByte) -> int
Returns a value indicating the sign of an 8-bit signed integer.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: Int16) -> int
Returns a value indicating the sign of a 16-bit signed integer.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
Sign(value: int) -> int
Returns a value indicating the sign of a 32-bit signed integer.
value: A signed number.
Returns: A number that indicates the sign of value,as shown in the following
table.Return value Meaning -1 value is less than zero. 0 value is equal to
zero. 1 value is greater than zero.
"""
pass
@staticmethod
def Sin(a):
"""
Sin(a: float) -> float
Returns the sine of the specified angle.
a: An angle,measured in radians.
Returns: The sine of a. If a is equal to System.Double.NaN,
System.Double.NegativeInfinity,or System.Double.PositiveInfinity,this method
returns System.Double.NaN.
"""
pass
@staticmethod
def Sinh(value):
"""
Sinh(value: float) -> float
Returns the hyperbolic sine of the specified angle.
value: An angle,measured in radians.
Returns: The hyperbolic sine of value. If value is equal to
System.Double.NegativeInfinity,System.Double.PositiveInfinity,or
System.Double.NaN,this method returns a System.Double equal to value.
"""
pass
@staticmethod
def Sqrt(d):
"""
Sqrt(d: float) -> float
Returns the square root of a specified number.
d: A number.
Returns: One of the values in the following table.
d zero or positive: the positive square root of d.
d negative: System.Double.NaN.
d equals System.Double.NaN: System.Double.NaN.
d equals System.Double.PositiveInfinity: System.Double.PositiveInfinity.
"""
pass
@staticmethod
def Tan(a):
"""
Tan(a: float) -> float
Returns the tangent of the specified angle.
a: An angle,measured in radians.
Returns: The tangent of a. If a is equal to System.Double.NaN,
System.Double.NegativeInfinity,or System.Double.PositiveInfinity,this method
returns System.Double.NaN.
"""
pass
@staticmethod
def Tanh(value):
"""
Tanh(value: float) -> float
Returns the hyperbolic tangent of the specified angle.
value: An angle,measured in radians.
Returns: The hyperbolic tangent of value. If value is equal to
System.Double.NegativeInfinity,this method returns -1. If value is equal to
System.Double.PositiveInfinity,this method returns 1. If value is equal to
System.Double.NaN,this method returns System.Double.NaN.
"""
pass
@staticmethod
def Truncate(d):
"""
Truncate(d: float) -> float
Calculates the integral part of a specified double-precision floating-point
number.
d: A number to truncate.
Returns: The integral part of d; that is,the number that remains after any fractional
digits have been discarded,or one of the values listed in the following table.
d equals System.Double.NaN: System.Double.NaN.
d equals System.Double.NegativeInfinity: System.Double.NegativeInfinity.
d equals System.Double.PositiveInfinity: System.Double.PositiveInfinity.
Truncate(d: Decimal) -> Decimal
Calculates the integral part of a specified decimal number.
d: A number to truncate.
Returns: The integral part of d; that is,the number that remains after any fractional
digits have been discarded.
"""
pass
E=2.7182818284590451
PI=3.1415926535897931
__all__=[
'Abs',
'Acos',
'Asin',
'Atan',
'Atan2',
'BigMul',
'Ceiling',
'Cos',
'Cosh',
'DivRem',
'E',
'Exp',
'Floor',
'IEEERemainder',
'Log',
'Log10',
'Max',
'Min',
'PI',
'Pow',
'Round',
'Sign',
'Sin',
'Sinh',
'Sqrt',
'Tan',
'Tanh',
'Truncate',
]
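# Illustrative sketch (not part of the stub above): the Round overloads
# documented here default to "round half to even", returning the even neighbour
# when a value is exactly halfway between two integers, and MidpointRounding
# selects a different rule. Python's built-in round() and the decimal module
# expose the same two behaviours, so they can stand in for the examples in the
# docstring.
from decimal import Decimal, ROUND_HALF_EVEN, ROUND_HALF_UP

# Round half to even (the default described for Math.Round):
assert round(2.5) == 2 and round(3.5) == 4

# Explicit midpoint modes, analogous to passing a MidpointRounding value:
assert Decimal("2.5").quantize(Decimal("1"), rounding=ROUND_HALF_EVEN) == Decimal("2")
assert Decimal("2.5").quantize(Decimal("1"), rounding=ROUND_HALF_UP) == Decimal("3")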
| 35.507813
| 118
| 0.676384
| 3,906
| 27,270
| 4.719662
| 0.066564
| 0.040683
| 0.029509
| 0.034934
| 0.833523
| 0.784161
| 0.751505
| 0.692596
| 0.640683
| 0.602658
| 0
| 0.023329
| 0.258086
| 27,270
| 767
| 119
| 35.554107
| 0.887851
| 0.864943
| 0
| 0.468468
| 0
| 0
| 0.082937
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.234234
| false
| 0.234234
| 0
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
136a9ecf326f653eb63bd6476ddc5300198cdca4
| 3,433
|
py
|
Python
|
test/test_gits_sync.py
|
oaaky/GITS
|
2254f298df158b5d085015c4cf9b542b059b3a6c
|
[
"MIT"
] | 2
|
2020-11-11T08:45:07.000Z
|
2021-09-02T18:36:21.000Z
|
test/test_gits_sync.py
|
oaaky/GITS
|
2254f298df158b5d085015c4cf9b542b059b3a6c
|
[
"MIT"
] | 24
|
2020-10-01T16:55:05.000Z
|
2020-10-27T02:51:25.000Z
|
test/test_gits_sync.py
|
oaaky/GITS
|
2254f298df158b5d085015c4cf9b542b059b3a6c
|
[
"MIT"
] | 15
|
2020-10-02T03:43:30.000Z
|
2021-10-01T03:48:32.000Z
|
import argparse
import os
import sys
sys.path.insert(1, os.getcwd())
from gits_sync import gits_sync
from mock import patch, Mock
def parse_args(args):
parser = argparse.ArgumentParser()
return parser.parse_args(args)
@patch("argparse.ArgumentParser.parse_args",
return_value=argparse.Namespace(source="branch name"))
@patch("subprocess.Popen")
@patch("helper.get_current_branch", return_value="current branch")
@patch("helper.get_trunk_branch_name", return_value="main branch")
def test_gits_sync_happy_case_source_branch(mock_main_branch, mock_curr_branch, mock_var, mock_args):
"""
Function to test gits sync, success case when source branch is given
"""
mocked_pipe = Mock()
attrs = {'communicate.return_value': (b'', 'error'), 'returncode': 0}
mocked_pipe.configure_mock(**attrs)
mock_var.return_value = mocked_pipe
mock_args = parse_args(mock_args)
test_result = gits_sync(mock_args)
assert test_result, "Normal Case"
@patch("argparse.ArgumentParser.parse_args",
return_value=argparse.Namespace(source=None))
@patch("subprocess.Popen")
@patch("helper.get_current_branch", return_value="current branch")
@patch("helper.get_trunk_branch_name", return_value="main branch")
def test_gits_sync_happy_case_no_source_branch(mock_main_branch, mock_curr_branch, mock_var, mock_args):
"""
Function to test gits sync, success case when source branch is not given
"""
mocked_pipe = Mock()
attrs = {'communicate.return_value': (b'', 'error'), 'returncode': 0}
mocked_pipe.configure_mock(**attrs)
mock_var.return_value = mocked_pipe
mock_args = parse_args(mock_args)
test_result = gits_sync(mock_args)
assert test_result, "Normal Case"
@patch("argparse.ArgumentParser.parse_args",
return_value=argparse.Namespace(source="branch name"))
@patch("subprocess.Popen")
@patch("helper.get_current_branch", return_value="current branch")
@patch("helper.get_trunk_branch_name", return_value="main branch")
def test_gits_sync_sad_case_uncommitted_changes(mock_main_branch, mock_curr_branch, mock_var, mock_args):
"""
Function to test gits sync, failure case when there are uncommitted changes
"""
mocked_pipe = Mock()
attrs = {'communicate.return_value': (b'anything', 'error'), 'returncode': 0}
mocked_pipe.configure_mock(**attrs)
mock_var.return_value = mocked_pipe
mock_args = parse_args(mock_args)
test_result = gits_sync(mock_args)
assert not test_result
@patch("argparse.ArgumentParser.parse_args",
return_value=argparse.Namespace())
@patch("subprocess.Popen")
@patch("helper.get_current_branch", return_value="current branch")
@patch("helper.get_trunk_branch_name", return_value="main branch")
def test_gits_sync_sad_case_no_arguments(mock_main_branch, mock_curr_branch, mock_var, mock_args):
"""
Function to test gits sync, failure case when no arguments are given
"""
mocked_pipe = Mock()
attrs = {'communicate.return_value': (b'', 'error'), 'returncode': 0}
mocked_pipe.configure_mock(**attrs)
mock_var.return_value = mocked_pipe
mock_args = parse_args(mock_args)
test_result = gits_sync(mock_args)
assert not test_result, "Normal Case"
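# Illustrative sketch (not part of the test module above): these tests work by
# patching subprocess.Popen so communicate() returns canned (stdout, stderr)
# pairs and returncode is fixed. The standalone example below shows the same
# Mock/configure_mock pattern with unittest.mock; run_status is a hypothetical
# helper defined only for this example and is not part of gits_sync.
import subprocess
from unittest.mock import Mock, patch


def run_status():
    """Return True when `git status --porcelain` reports a clean working tree."""
    pipe = subprocess.Popen(["git", "status", "--porcelain"],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _err = pipe.communicate()
    return pipe.returncode == 0 and out == b""


def test_run_status_clean_tree():
    mocked_pipe = Mock()
    mocked_pipe.configure_mock(**{'communicate.return_value': (b'', b''), 'returncode': 0})
    with patch("subprocess.Popen", return_value=mocked_pipe):
        assert run_status()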
| 33.656863
| 105
| 0.7291
| 466
| 3,433
| 5.060086
| 0.148069
| 0.093299
| 0.047498
| 0.054283
| 0.891009
| 0.891009
| 0.891009
| 0.891009
| 0.873198
| 0.862595
| 0
| 0.001733
| 0.159336
| 3,433
| 101
| 106
| 33.990099
| 0.815315
| 0.083309
| 0
| 0.767123
| 0
| 0
| 0.236876
| 0.143876
| 0
| 0
| 0
| 0
| 0.109589
| 1
| 0.068493
| false
| 0
| 0.068493
| 0
| 0.150685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1392fe3e1be64eaca6abf26910b770843a5e6c74
| 48,371
|
py
|
Python
|
graphql/language/tests/test_visitor.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 8
|
2020-03-23T21:34:02.000Z
|
2021-11-12T11:27:45.000Z
|
graphql/language/tests/test_visitor.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 17
|
2020-03-14T22:22:29.000Z
|
2022-03-16T19:26:37.000Z
|
graphql/language/tests/test_visitor.py
|
ThanksBoomerang/graphql-core-legacy
|
6e2fbccdec655ce9122b84d3808c14242c4e6b96
|
[
"MIT"
] | 17
|
2020-03-23T12:06:23.000Z
|
2022-02-13T05:33:32.000Z
|
from graphql.language.ast import (
Document,
Field,
Name,
OperationDefinition,
SelectionSet,
)
from graphql.language.parser import parse
from graphql.language.printer import print_ast
from graphql.language.visitor import (
BREAK,
REMOVE,
ParallelVisitor,
TypeInfoVisitor,
Visitor,
visit,
)
from graphql.type import get_named_type, is_composite_type
from graphql.utils.type_info import TypeInfo
from ...validation.tests.utils import test_schema
from .fixtures import KITCHEN_SINK
from graphql.language.ast import Document
from graphql.language.ast import OperationDefinition
from graphql.language.ast import SelectionSet
from typing import Any
from typing import Optional
from typing import Union
from graphql.language.ast import Field
from graphql.language.ast import Name
from graphql.language.visitor import _Falsey
from typing import List
from graphql.language.ast import Argument
from graphql.language.ast import IntValue
def test_allows_editing_a_node_both_on_enter_and_on_leave():
# type: () -> None
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
class TestVisitor(Visitor):
def __init__(self):
# type: () -> None
self.did_enter = False
self.did_leave = False
def enter(
self,
node, # type: Union[Document, OperationDefinition, SelectionSet]
*args # type: Any
):
# type: (...) -> Optional[OperationDefinition]
if isinstance(node, OperationDefinition):
self.did_enter = True
selection_set = node.selection_set
self.selections = None
if selection_set:
self.selections = selection_set.selections
new_selection_set = SelectionSet(selections=[])
return OperationDefinition(
name=node.name,
variable_definitions=node.variable_definitions,
directives=node.directives,
loc=node.loc,
operation=node.operation,
selection_set=new_selection_set,
)
def leave(
self,
node, # type: Union[Document, OperationDefinition, SelectionSet]
*args # type: Any
):
# type: (...) -> Optional[OperationDefinition]
if isinstance(node, OperationDefinition):
self.did_leave = True
new_selection_set = None
if self.selections:
new_selection_set = SelectionSet(selections=self.selections)
return OperationDefinition(
name=node.name,
variable_definitions=node.variable_definitions,
directives=node.directives,
loc=node.loc,
operation=node.operation,
selection_set=new_selection_set,
)
visitor = TestVisitor()
edited_ast = visit(ast, visitor)
assert ast == parse("{ a, b, c { a, b, c } }", no_location=True)
assert edited_ast == ast
assert visitor.did_enter
assert visitor.did_leave
def test_allows_editing_the_root_node_on_enter_and_on_leave():
# type: () -> None
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
definitions = ast.definitions
class TestVisitor(Visitor):
def __init__(self):
# type: () -> None
self.did_enter = False
self.did_leave = False
def enter(self, node, *args):
# type: (Document, *Any) -> Document
if isinstance(node, Document):
self.did_enter = True
return Document(loc=node.loc, definitions=[])
def leave(self, node, *args):
# type: (Document, *Any) -> Document
if isinstance(node, Document):
self.did_leave = True
return Document(loc=node.loc, definitions=definitions)
visitor = TestVisitor()
edited_ast = visit(ast, visitor)
assert edited_ast == ast
assert visitor.did_enter
assert visitor.did_leave
def test_allows_for_editing_on_enter():
# type: () -> None
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
class TestVisitor(Visitor):
def enter(self, node, *args):
# type: (Any, *Any) -> Optional[Any]
if isinstance(node, Field) and node.name.value == "b":
return REMOVE
edited_ast = visit(ast, TestVisitor())
assert ast == parse("{ a, b, c { a, b, c } }", no_location=True)
assert edited_ast == parse("{ a, c { a, c } }", no_location=True)
def test_allows_for_editing_on_leave():
# type: () -> None
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
class TestVisitor(Visitor):
def leave(self, node, *args):
# type: (Union[Field, Name], *Any) -> Optional[Falsey]
if isinstance(node, Field) and node.name.value == "b":
return REMOVE
edited_ast = visit(ast, TestVisitor())
assert ast == parse("{ a, b, c { a, b, c } }", no_location=True)
assert edited_ast == parse("{ a, c { a, c } }", no_location=True)
def test_visits_edited_node():
# type: () -> None
added_field = Field(name=Name(value="__typename"))
ast = parse("{ a { x } }")
class TestVisitor(Visitor):
def __init__(self):
# type: () -> None
self.did_visit_added_field = False
def enter(self, node, *args):
# type: (Any, *Any) -> Optional[Field]
if isinstance(node, Field) and node.name.value == "a":
selection_set = node.selection_set
selections = []
if selection_set:
selections = selection_set.selections
new_selection_set = SelectionSet(selections=[added_field] + selections)
return Field(name=None, selection_set=new_selection_set)
if node is added_field:
self.did_visit_added_field = True
visitor = TestVisitor()
visit(ast, visitor)
assert visitor.did_visit_added_field
def test_allows_skipping_a_subtree():
# type: () -> None
visited = []
ast = parse("{ a, b { x }, c }")
class TestVisitor(Visitor):
def enter(self, node, *args):
# type: (Any, *Any) -> Optional[Any]
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
if isinstance(node, Field) and node.name.value == "b":
return False
def leave(self, node, *args):
# type: (Union[Field, Name, SelectionSet], *Any) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
visit(ast, TestVisitor())
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "OperationDefinition", None],
["leave", "Document", None],
]
def test_allows_early_exit_while_visiting():
# type: () -> None
visited = []
ast = parse("{ a, b { x }, c }")
class TestVisitor(Visitor):
def enter(self, node, *args):
# type: (Any, *Any) -> Optional[Any]
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
if isinstance(node, Name) and node.value == "x":
return BREAK
def leave(self, node, *args):
# type: (Union[Field, Name], *Any) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
visit(ast, TestVisitor())
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "b"],
["leave", "Name", "b"],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "x"],
]
def test_allows_a_named_functions_visitor_api():
# type: () -> None
visited = []
ast = parse("{ a, b { x }, c }")
class TestVisitor(Visitor):
def enter_Name(self, node, *args):
# type: (Name, *Any) -> None
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
def enter_SelectionSet(self, node, *args):
# type: (SelectionSet, *Any) -> None
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
def leave_SelectionSet(self, node, *args):
# type: (SelectionSet, *Any) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
visit(ast, TestVisitor())
assert visited == [
["enter", "SelectionSet", None],
["enter", "Name", "a"],
["enter", "Name", "b"],
["enter", "SelectionSet", None],
["enter", "Name", "x"],
["leave", "SelectionSet", None],
["enter", "Name", "c"],
["leave", "SelectionSet", None],
]
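# Illustrative sketch (not one of the original tests above): a minimal visitor
# that uses only the named-method form (enter_Name) to collect the Name values
# of a query, reusing the parse/visit/Visitor imports already present in this
# module. It assumes graphql-core-legacy is installed, as the tests above do.
def example_collect_names():
    # type: () -> List[str]
    names = []

    class NameCollector(Visitor):
        def enter_Name(self, node, *args):
            # type: (Name, *Any) -> None
            names.append(node.value)

    visit(parse("{ a, b { x }, c }"), NameCollector())
    # For this query the result is ["a", "b", "x", "c"], matching the Name order
    # seen in the generic enter/leave tests above.
    return names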
def test_visits_kitchen_sink():
# type: () -> None
visited = []
ast = parse(KITCHEN_SINK)
class TestVisitor(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> None
kind = parent and type(parent).__name__
if kind == "list":
kind = None
visited.append(["enter", type(node).__name__, key, kind])
def leave(self, node, key, parent, *args):
# type: (Any, Union[int, str], Any, *List[Any]) -> None
kind = parent and type(parent).__name__
if kind == "list":
kind = None
visited.append(["leave", type(node).__name__, key, kind])
visit(ast, TestVisitor())
assert visited == [
["enter", "Document", None, None],
["enter", "OperationDefinition", 0, None],
["enter", "Name", "name", "OperationDefinition"],
["leave", "Name", "name", "OperationDefinition"],
["enter", "VariableDefinition", 0, None],
["enter", "Variable", "variable", "VariableDefinition"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "variable", "VariableDefinition"],
["enter", "NamedType", "type", "VariableDefinition"],
["enter", "Name", "name", "NamedType"],
["leave", "Name", "name", "NamedType"],
["leave", "NamedType", "type", "VariableDefinition"],
["leave", "VariableDefinition", 0, None],
["enter", "VariableDefinition", 1, None],
["enter", "Variable", "variable", "VariableDefinition"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "variable", "VariableDefinition"],
["enter", "NamedType", "type", "VariableDefinition"],
["enter", "Name", "name", "NamedType"],
["leave", "Name", "name", "NamedType"],
["leave", "NamedType", "type", "VariableDefinition"],
["enter", "EnumValue", "default_value", "VariableDefinition"],
["leave", "EnumValue", "default_value", "VariableDefinition"],
["leave", "VariableDefinition", 1, None],
["enter", "SelectionSet", "selection_set", "OperationDefinition"],
["enter", "Field", 0, None],
["enter", "Name", "alias", "Field"],
["leave", "Name", "alias", "Field"],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "ListValue", "value", "Argument"],
["enter", "IntValue", 0, None],
["leave", "IntValue", 0, None],
["enter", "IntValue", 1, None],
["leave", "IntValue", 1, None],
["leave", "ListValue", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["enter", "InlineFragment", 1, None],
["enter", "NamedType", "type_condition", "InlineFragment"],
["enter", "Name", "name", "NamedType"],
["leave", "Name", "name", "NamedType"],
["leave", "NamedType", "type_condition", "InlineFragment"],
["enter", "Directive", 0, None],
["enter", "Name", "name", "Directive"],
["leave", "Name", "name", "Directive"],
["leave", "Directive", 0, None],
["enter", "SelectionSet", "selection_set", "InlineFragment"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["enter", "Field", 1, None],
["enter", "Name", "alias", "Field"],
["leave", "Name", "alias", "Field"],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "IntValue", "value", "Argument"],
["leave", "IntValue", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "Argument", 1, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 1, None],
["enter", "Directive", 0, None],
["enter", "Name", "name", "Directive"],
["leave", "Name", "name", "Directive"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 0, None],
["leave", "Directive", 0, None],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["enter", "FragmentSpread", 1, None],
["enter", "Name", "name", "FragmentSpread"],
["leave", "Name", "name", "FragmentSpread"],
["leave", "FragmentSpread", 1, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 1, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "InlineFragment"],
["leave", "InlineFragment", 1, None],
["enter", "InlineFragment", 2, None],
["enter", "Directive", 0, None],
["enter", "Name", "name", "Directive"],
["leave", "Name", "name", "Directive"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 0, None],
["leave", "Directive", 0, None],
["enter", "SelectionSet", "selection_set", "InlineFragment"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "InlineFragment"],
["leave", "InlineFragment", 2, None],
["enter", "InlineFragment", 3, None],
["enter", "SelectionSet", "selection_set", "InlineFragment"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "InlineFragment"],
["leave", "InlineFragment", 3, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "OperationDefinition"],
["leave", "OperationDefinition", 0, None],
["enter", "OperationDefinition", 1, None],
["enter", "Name", "name", "OperationDefinition"],
["leave", "Name", "name", "OperationDefinition"],
["enter", "SelectionSet", "selection_set", "OperationDefinition"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "IntValue", "value", "Argument"],
["leave", "IntValue", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "Directive", 0, None],
["enter", "Name", "name", "Directive"],
["leave", "Name", "name", "Directive"],
["leave", "Directive", 0, None],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "OperationDefinition"],
["leave", "OperationDefinition", 1, None],
["enter", "OperationDefinition", 2, None],
["enter", "Name", "name", "OperationDefinition"],
["leave", "Name", "name", "OperationDefinition"],
["enter", "VariableDefinition", 0, None],
["enter", "Variable", "variable", "VariableDefinition"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "variable", "VariableDefinition"],
["enter", "NamedType", "type", "VariableDefinition"],
["enter", "Name", "name", "NamedType"],
["leave", "Name", "name", "NamedType"],
["leave", "NamedType", "type", "VariableDefinition"],
["leave", "VariableDefinition", 0, None],
["enter", "SelectionSet", "selection_set", "OperationDefinition"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["enter", "Field", 1, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "SelectionSet", "selection_set", "Field"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 1, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "Field"],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "OperationDefinition"],
["leave", "OperationDefinition", 2, None],
["enter", "FragmentDefinition", 3, None],
["enter", "Name", "name", "FragmentDefinition"],
["leave", "Name", "name", "FragmentDefinition"],
["enter", "NamedType", "type_condition", "FragmentDefinition"],
["enter", "Name", "name", "NamedType"],
["leave", "Name", "name", "NamedType"],
["leave", "NamedType", "type_condition", "FragmentDefinition"],
["enter", "SelectionSet", "selection_set", "FragmentDefinition"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "Argument", 1, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "Variable", "value", "Argument"],
["enter", "Name", "name", "Variable"],
["leave", "Name", "name", "Variable"],
["leave", "Variable", "value", "Argument"],
["leave", "Argument", 1, None],
["enter", "Argument", 2, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "ObjectValue", "value", "Argument"],
["enter", "ObjectField", 0, None],
["enter", "Name", "name", "ObjectField"],
["leave", "Name", "name", "ObjectField"],
["enter", "StringValue", "value", "ObjectField"],
["leave", "StringValue", "value", "ObjectField"],
["leave", "ObjectField", 0, None],
["leave", "ObjectValue", "value", "Argument"],
["leave", "Argument", 2, None],
["leave", "Field", 0, None],
["leave", "SelectionSet", "selection_set", "FragmentDefinition"],
["leave", "FragmentDefinition", 3, None],
["enter", "OperationDefinition", 4, None],
["enter", "SelectionSet", "selection_set", "OperationDefinition"],
["enter", "Field", 0, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["enter", "Argument", 0, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "BooleanValue", "value", "Argument"],
["leave", "BooleanValue", "value", "Argument"],
["leave", "Argument", 0, None],
["enter", "Argument", 1, None],
["enter", "Name", "name", "Argument"],
["leave", "Name", "name", "Argument"],
["enter", "BooleanValue", "value", "Argument"],
["leave", "BooleanValue", "value", "Argument"],
["leave", "Argument", 1, None],
["leave", "Field", 0, None],
["enter", "Field", 1, None],
["enter", "Name", "name", "Field"],
["leave", "Name", "name", "Field"],
["leave", "Field", 1, None],
["leave", "SelectionSet", "selection_set", "OperationDefinition"],
["leave", "OperationDefinition", 4, None],
["leave", "Document", None, None],
]
def test_visits_in_pararell_allows_skipping_a_subtree():
# type: () -> None
visited = []
ast = parse("{ a, b { x }, c }")
class TestVisitor(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> Optional[Any]
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
if type(node).__name__ == "Field" and node.name.value == "b":
return False
def leave(
self,
node, # type: Union[Field, Name, SelectionSet]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field, OperationDefinition]
*args # type: List[Any]
):
# type: (...) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
visit(ast, ParallelVisitor([TestVisitor()]))
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "OperationDefinition", None],
["leave", "Document", None],
]
def test_visits_in_pararell_allows_skipping_different_subtrees():
# type: () -> None
visited = []
ast = parse("{ a { x }, b { y} }")
class TestVisitor(Visitor):
def __init__(self, name):
# type: (str) -> None
self.name = name
def enter(
self,
node, # type: Union[Document, OperationDefinition, SelectionSet]
key, # type: Union[None, int, str]
parent, # type: Union[List[OperationDefinition], None, OperationDefinition]
*args # type: Any
):
# type: (...) -> Optional[Any]
visited.append(
[
"no-{}".format(self.name),
"enter",
type(node).__name__,
getattr(node, "value", None),
]
)
if type(node).__name__ == "Field" and node.name.value == self.name:
return False
def leave(
self,
node, # type: Union[Field, Name, SelectionSet]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> None
visited.append(
[
"no-{}".format(self.name),
"leave",
type(node).__name__,
getattr(node, "value", None),
]
)
visit(ast, ParallelVisitor([TestVisitor("a"), TestVisitor("b")]))
assert visited == [
["no-a", "enter", "Document", None],
["no-b", "enter", "Document", None],
["no-a", "enter", "OperationDefinition", None],
["no-b", "enter", "OperationDefinition", None],
["no-a", "enter", "SelectionSet", None],
["no-b", "enter", "SelectionSet", None],
["no-a", "enter", "Field", None],
["no-b", "enter", "Field", None],
["no-b", "enter", "Name", "a"],
["no-b", "leave", "Name", "a"],
["no-b", "enter", "SelectionSet", None],
["no-b", "enter", "Field", None],
["no-b", "enter", "Name", "x"],
["no-b", "leave", "Name", "x"],
["no-b", "leave", "Field", None],
["no-b", "leave", "SelectionSet", None],
["no-b", "leave", "Field", None],
["no-a", "enter", "Field", None],
["no-b", "enter", "Field", None],
["no-a", "enter", "Name", "b"],
["no-a", "leave", "Name", "b"],
["no-a", "enter", "SelectionSet", None],
["no-a", "enter", "Field", None],
["no-a", "enter", "Name", "y"],
["no-a", "leave", "Name", "y"],
["no-a", "leave", "Field", None],
["no-a", "leave", "SelectionSet", None],
["no-a", "leave", "Field", None],
["no-a", "leave", "SelectionSet", None],
["no-b", "leave", "SelectionSet", None],
["no-a", "leave", "OperationDefinition", None],
["no-b", "leave", "OperationDefinition", None],
["no-a", "leave", "Document", None],
["no-b", "leave", "Document", None],
]
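# The next two tests exercise early exit: returning BREAK from enter() or leave()
# ends the traversal for that visitor. With a single visitor the whole visit stops
# (the expected list below is cut off right after leaving Name "x"); with several
# visitors in a ParallelVisitor, the remaining visitors continue on their own.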
def test_visits_in_parallel_allows_early_exit_while_visiting():
# type: () -> None
visited = []
ast = parse("{ a, b { x }, c }")
class TestVisitor(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> None
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
def leave(
self,
node, # type: Union[Field, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> Optional[object]
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
if type(node).__name__ == "Name" and node.value == "x":
return BREAK
visit(ast, ParallelVisitor([TestVisitor()]))
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "b"],
["leave", "Name", "b"],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "x"],
["leave", "Name", "x"],
]
def test_visits_in_parallel_allows_early_exit_from_different_points():
# type: () -> None
visited = []
ast = parse("{ a { y }, b { x } }")
class TestVisitor(Visitor):
def __init__(self, name):
# type: (str) -> None
self.name = name
def enter(
self,
node, # type: Union[Document, OperationDefinition, SelectionSet]
key, # type: Union[None, int, str]
parent, # type: Union[List[OperationDefinition], None, OperationDefinition]
*args # type: Any
):
# type: (...) -> None
visited.append(
[
"break-{}".format(self.name),
"enter",
type(node).__name__,
getattr(node, "value", None),
]
)
def leave(
self,
node, # type: Union[Field, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> Optional[Any]
visited.append(
[
"break-{}".format(self.name),
"leave",
type(node).__name__,
getattr(node, "value", None),
]
)
if type(node).__name__ == "Field" and node.name.value == self.name:
return BREAK
visit(ast, ParallelVisitor([TestVisitor("a"), TestVisitor("b")]))
assert visited == [
["break-a", "enter", "Document", None],
["break-b", "enter", "Document", None],
["break-a", "enter", "OperationDefinition", None],
["break-b", "enter", "OperationDefinition", None],
["break-a", "enter", "SelectionSet", None],
["break-b", "enter", "SelectionSet", None],
["break-a", "enter", "Field", None],
["break-b", "enter", "Field", None],
["break-a", "enter", "Name", "a"],
["break-b", "enter", "Name", "a"],
["break-a", "leave", "Name", "a"],
["break-b", "leave", "Name", "a"],
["break-a", "enter", "SelectionSet", None],
["break-b", "enter", "SelectionSet", None],
["break-a", "enter", "Field", None],
["break-b", "enter", "Field", None],
["break-a", "enter", "Name", "y"],
["break-b", "enter", "Name", "y"],
["break-a", "leave", "Name", "y"],
["break-b", "leave", "Name", "y"],
["break-a", "leave", "Field", None],
["break-b", "leave", "Field", None],
["break-a", "leave", "SelectionSet", None],
["break-b", "leave", "SelectionSet", None],
["break-a", "leave", "Field", None],
["break-b", "leave", "Field", None],
["break-b", "enter", "Field", None],
["break-b", "enter", "Name", "b"],
["break-b", "leave", "Name", "b"],
["break-b", "enter", "SelectionSet", None],
["break-b", "enter", "Field", None],
["break-b", "enter", "Name", "x"],
["break-b", "leave", "Name", "x"],
["break-b", "leave", "Field", None],
["break-b", "leave", "SelectionSet", None],
["break-b", "leave", "Field", None],
]
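# The editing tests below use REMOVE: returning it from enter() or leave() deletes
# the current node from the copy that visit() returns. The original ast is left
# untouched, which is what the first assert in each test checks before the expected
# event list is compared.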
def test_visits_in_parallel_allows_for_editing_on_enter():
# type: () -> None
visited = []
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
class TestVisitor1(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> Optional[Any]
if type(node).__name__ == "Field" and node.name.value == "b":
return REMOVE
class TestVisitor2(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> None
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
def leave(
self,
node, # type: Union[Field, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
edited_ast = visit(ast, ParallelVisitor([TestVisitor1(), TestVisitor2()]))
assert ast == parse("{ a, b, c { a, b, c } }", no_location=True)
assert edited_ast == parse("{ a, c { a, c } }", no_location=True)
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "OperationDefinition", None],
["leave", "Document", None],
]
def test_visits_in_parallel_allows_for_editing_on_leave():
# type: () -> None
visited = []
ast = parse("{ a, b, c { a, b, c } }", no_location=True)
class TestVisitor1(Visitor):
def leave(
self,
node, # type: Union[Field, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> Optional[Falsey]
if type(node).__name__ == "Field" and node.name.value == "b":
return REMOVE
class TestVisitor2(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> None
visited.append(["enter", type(node).__name__, getattr(node, "value", None)])
def leave(
self,
node, # type: Union[Field, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Field], Field]
*args # type: List[Any]
):
# type: (...) -> None
visited.append(["leave", type(node).__name__, getattr(node, "value", None)])
edited_ast = visit(ast, ParallelVisitor([TestVisitor1(), TestVisitor2()]))
assert ast == parse("{ a, b, c { a, b, c } }", no_location=True)
assert edited_ast == parse("{ a, c { a, c } }", no_location=True)
assert visited == [
["enter", "Document", None],
["enter", "OperationDefinition", None],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "b"],
["leave", "Name", "b"],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["enter", "SelectionSet", None],
["enter", "Field", None],
["enter", "Name", "a"],
["leave", "Name", "a"],
["leave", "Field", None],
["enter", "Field", None],
["enter", "Name", "b"],
["leave", "Name", "b"],
["enter", "Field", None],
["enter", "Name", "c"],
["leave", "Name", "c"],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "Field", None],
["leave", "SelectionSet", None],
["leave", "OperationDefinition", None],
["leave", "Document", None],
]
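# The TypeInfo tests below wrap the visitor in a TypeInfoVisitor, which advances a
# TypeInfo instance alongside the traversal so that get_parent_type(), get_type()
# and get_input_type() always describe the node currently being entered or left.
# The second test also shows that nodes returned from enter() (the added
# "__typename" selections) are traversed with correct type information.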
def test_visits_with_typeinfo_maintains_type_info_during_visit():
# type: () -> None
visited = []
ast = parse("{ human(id: 4) { name, pets { name }, unknown } }")
type_info = TypeInfo(test_schema)
class TestVisitor(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> None
parent_type = type_info.get_parent_type()
_type = type_info.get_type()
input_type = type_info.get_input_type()
visited.append(
[
"enter",
type(node).__name__,
node.value if type(node).__name__ == "Name" else None,
str(parent_type) if parent_type else None,
str(_type) if _type else None,
str(input_type) if input_type else None,
]
)
def leave(
self,
node, # type: Union[Argument, IntValue, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Argument], Argument, Field]
*args # type: List[Any]
):
# type: (...) -> None
parent_type = type_info.get_parent_type()
_type = type_info.get_type()
input_type = type_info.get_input_type()
visited.append(
[
"leave",
type(node).__name__,
node.value if type(node).__name__ == "Name" else None,
str(parent_type) if parent_type else None,
str(_type) if _type else None,
str(input_type) if input_type else None,
]
)
visit(ast, TypeInfoVisitor(type_info, TestVisitor()))
assert visited == [
["enter", "Document", None, None, None, None],
["enter", "OperationDefinition", None, None, "QueryRoot", None],
["enter", "SelectionSet", None, "QueryRoot", "QueryRoot", None],
["enter", "Field", None, "QueryRoot", "Human", None],
["enter", "Name", "human", "QueryRoot", "Human", None],
["leave", "Name", "human", "QueryRoot", "Human", None],
["enter", "Argument", None, "QueryRoot", "Human", "ID"],
["enter", "Name", "id", "QueryRoot", "Human", "ID"],
["leave", "Name", "id", "QueryRoot", "Human", "ID"],
["enter", "IntValue", None, "QueryRoot", "Human", "ID"],
["leave", "IntValue", None, "QueryRoot", "Human", "ID"],
["leave", "Argument", None, "QueryRoot", "Human", "ID"],
["enter", "SelectionSet", None, "Human", "Human", None],
["enter", "Field", None, "Human", "String", None],
["enter", "Name", "name", "Human", "String", None],
["leave", "Name", "name", "Human", "String", None],
["leave", "Field", None, "Human", "String", None],
["enter", "Field", None, "Human", "[Pet]", None],
["enter", "Name", "pets", "Human", "[Pet]", None],
["leave", "Name", "pets", "Human", "[Pet]", None],
["enter", "SelectionSet", None, "Pet", "[Pet]", None],
["enter", "Field", None, "Pet", "String", None],
["enter", "Name", "name", "Pet", "String", None],
["leave", "Name", "name", "Pet", "String", None],
["leave", "Field", None, "Pet", "String", None],
["leave", "SelectionSet", None, "Pet", "[Pet]", None],
["leave", "Field", None, "Human", "[Pet]", None],
["enter", "Field", None, "Human", None, None],
["enter", "Name", "unknown", "Human", None, None],
["leave", "Name", "unknown", "Human", None, None],
["leave", "Field", None, "Human", None, None],
["leave", "SelectionSet", None, "Human", "Human", None],
["leave", "Field", None, "QueryRoot", "Human", None],
["leave", "SelectionSet", None, "QueryRoot", "QueryRoot", None],
["leave", "OperationDefinition", None, None, "QueryRoot", None],
["leave", "Document", None, None, None, None],
]
def test_visits_with_typeinfo_maintains_type_info_during_edit():
# type: () -> None
visited = []
ast = parse("{ human(id: 4) { name, pets }, alien }")
type_info = TypeInfo(test_schema)
class TestVisitor(Visitor):
def enter(self, node, key, parent, *args):
# type: (Any, Union[None, int, str], Any, *List[Any]) -> Optional[Any]
parent_type = type_info.get_parent_type()
_type = type_info.get_type()
input_type = type_info.get_input_type()
visited.append(
[
"enter",
type(node).__name__,
node.value if type(node).__name__ == "Name" else None,
str(parent_type) if parent_type else None,
str(_type) if _type else None,
str(input_type) if input_type else None,
]
)
# Make a query valid by adding missing selection sets.
if (
type(node).__name__ == "Field"
and not node.selection_set
and is_composite_type(get_named_type(_type))
):
return Field(
alias=node.alias,
name=node.name,
arguments=node.arguments,
directives=node.directives,
selection_set=SelectionSet([Field(name=Name(value="__typename"))]),
)
def leave(
self,
node, # type: Union[Argument, IntValue, Name]
key, # type: Union[int, str]
parent, # type: Union[List[Argument], Argument, Field]
*args # type: List[Any]
):
# type: (...) -> None
parent_type = type_info.get_parent_type()
_type = type_info.get_type()
input_type = type_info.get_input_type()
visited.append(
[
"leave",
type(node).__name__,
node.value if type(node).__name__ == "Name" else None,
str(parent_type) if parent_type else None,
str(_type) if _type else None,
str(input_type) if input_type else None,
]
)
edited_ast = visit(ast, TypeInfoVisitor(type_info, TestVisitor()))
# assert print_ast(ast) == print_ast(parse(
# '{ human(id: 4) { name, pets }, alien }'
# ))
assert print_ast(edited_ast) == print_ast(
parse("{ human(id: 4) { name, pets { __typename } }, alien { __typename } }")
)
assert visited == [
["enter", "Document", None, None, None, None],
["enter", "OperationDefinition", None, None, "QueryRoot", None],
["enter", "SelectionSet", None, "QueryRoot", "QueryRoot", None],
["enter", "Field", None, "QueryRoot", "Human", None],
["enter", "Name", "human", "QueryRoot", "Human", None],
["leave", "Name", "human", "QueryRoot", "Human", None],
["enter", "Argument", None, "QueryRoot", "Human", "ID"],
["enter", "Name", "id", "QueryRoot", "Human", "ID"],
["leave", "Name", "id", "QueryRoot", "Human", "ID"],
["enter", "IntValue", None, "QueryRoot", "Human", "ID"],
["leave", "IntValue", None, "QueryRoot", "Human", "ID"],
["leave", "Argument", None, "QueryRoot", "Human", "ID"],
["enter", "SelectionSet", None, "Human", "Human", None],
["enter", "Field", None, "Human", "String", None],
["enter", "Name", "name", "Human", "String", None],
["leave", "Name", "name", "Human", "String", None],
["leave", "Field", None, "Human", "String", None],
["enter", "Field", None, "Human", "[Pet]", None],
["enter", "Name", "pets", "Human", "[Pet]", None],
["leave", "Name", "pets", "Human", "[Pet]", None],
["enter", "SelectionSet", None, "Pet", "[Pet]", None],
["enter", "Field", None, "Pet", "String!", None],
["enter", "Name", "__typename", "Pet", "String!", None],
["leave", "Name", "__typename", "Pet", "String!", None],
["leave", "Field", None, "Pet", "String!", None],
["leave", "SelectionSet", None, "Pet", "[Pet]", None],
["leave", "Field", None, "Human", "[Pet]", None],
["leave", "SelectionSet", None, "Human", "Human", None],
["leave", "Field", None, "QueryRoot", "Human", None],
["enter", "Field", None, "QueryRoot", "Alien", None],
["enter", "Name", "alien", "QueryRoot", "Alien", None],
["leave", "Name", "alien", "QueryRoot", "Alien", None],
["enter", "SelectionSet", None, "Alien", "Alien", None],
["enter", "Field", None, "Alien", "String!", None],
["enter", "Name", "__typename", "Alien", "String!", None],
["leave", "Name", "__typename", "Alien", "String!", None],
["leave", "Field", None, "Alien", "String!", None],
["leave", "SelectionSet", None, "Alien", "Alien", None],
["leave", "Field", None, "QueryRoot", "Alien", None],
["leave", "SelectionSet", None, "QueryRoot", "QueryRoot", None],
["leave", "OperationDefinition", None, None, "QueryRoot", None],
["leave", "Document", None, None, None, None],
]
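# A minimal, self-contained usage sketch of the visitor API exercised above
# (assumption: graphql-core exposes parse, visit, Visitor and BREAK at the paths
# imported below). It collects field names in document order and stops early at
# "b", mirroring the BREAK behaviour tested earlier. It only runs when this file
# is executed directly, so it does not interfere with test collection.
if __name__ == "__main__":
    from graphql.language.parser import parse
    from graphql.language.visitor import BREAK, Visitor, visit

    class FieldNameCollector(Visitor):
        def __init__(self):
            self.names = []  # field names seen so far, in document order

        def enter(self, node, key, parent, *args):
            if type(node).__name__ == "Field":
                self.names.append(node.name.value)
                if node.name.value == "b":
                    return BREAK  # stop the traversal for this visitor

    collector = FieldNameCollector()
    visit(parse("{ a, b { x }, c }"), collector)
    assert collector.names == ["a", "b"]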
| 40.376461
| 88
| 0.50154
| 4,682
| 48,371
| 5.070056
| 0.033533
| 0.0618
| 0.041621
| 0.030795
| 0.890387
| 0.843373
| 0.816244
| 0.796487
| 0.780773
| 0.760974
| 0
| 0.003333
| 0.292882
| 48,371
| 1,197
| 89
| 40.410192
| 0.690679
| 0.068326
| 0
| 0.765504
| 0
| 0
| 0.276264
| 0
| 0
| 0
| 0
| 0
| 0.028101
| 1
| 0.054264
| false
| 0
| 0.01938
| 0
| 0.107558
| 0.001938
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13a41edd3c4f4b97f6dbc2d960202fd14d839f6b
| 49,903
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/cloud_service_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/cloud_service_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/cloud_service_pb2.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spaceone/api/inventory/v1/cloud_service.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from spaceone.api.core.v1 import query_pb2 as spaceone_dot_api_dot_core_dot_v1_dot_query__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='spaceone/api/inventory/v1/cloud_service.proto',
package='spaceone.api.inventory.v1',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n-spaceone/api/inventory/v1/cloud_service.proto\x12\x19spaceone.api.inventory.v1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1cgoogle/api/annotations.proto\x1a spaceone/api/core/v1/query.proto\"C\n\x15\x43loudServiceReference\x12\x13\n\x0bresource_id\x18\x01 \x01(\t\x12\x15\n\rexternal_link\x18\x02 \x01(\t\"\xe9\x02\n\x14\x43reateServiceRequest\x12\x1a\n\x12\x63loud_service_type\x18\x01 \x01(\t\x12\x10\n\x08provider\x18\x02 \x01(\t\x12\x1b\n\x13\x63loud_service_group\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12%\n\x04\x64\x61ta\x18\x0b \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08metadata\x18\x0c \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x43\n\treference\x18\r \x01(\x0b\x32\x30.spaceone.api.inventory.v1.CloudServiceReference\x12%\n\x04tags\x18\x0e \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x12\n\nproject_id\x18\x16 \x01(\t\x12\x11\n\tdomain_id\x18\x17 \x01(\t\x12\x13\n\x0bregion_code\x18\x18 \x01(\t\"\xee\x02\n\x19UpdateCloudServiceRequest\x12\x18\n\x10\x63loud_service_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12%\n\x04\x64\x61ta\x18\x0b \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08metadata\x18\x0c \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x43\n\treference\x18\r \x01(\x0b\x32\x30.spaceone.api.inventory.v1.CloudServiceReference\x12%\n\x04tags\x18\x0e \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x13\n\x0bregion_code\x18\x14 \x01(\t\x12\x12\n\nproject_id\x18\x15 \x01(\t\x12\x11\n\tdomain_id\x18\x16 \x01(\t\x12\x17\n\x0frelease_project\x18\x1e \x01(\x08\x12\x16\n\x0erelease_region\x18\x1f \x01(\x08\"W\n\x1aPinCloudServiceDataRequest\x12\x18\n\x10\x63loud_service_id\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x11\n\tdomain_id\x18\x03 \x01(\t\"B\n\x13\x43loudServiceRequest\x12\x18\n\x10\x63loud_service_id\x18\x01 \x01(\t\x12\x11\n\tdomain_id\x18\x02 \x01(\t\"S\n\x16GetCloudServiceRequest\x12\x18\n\x10\x63loud_service_id\x18\x01 \x01(\t\x12\x11\n\tdomain_id\x18\x02 \x01(\t\x12\x0c\n\x04only\x18\x03 \x03(\t\"\x98\x02\n\x11\x43loudServiceQuery\x12*\n\x05query\x18\x01 \x01(\x0b\x32\x1b.spaceone.api.core.v1.Query\x12\x18\n\x10\x63loud_service_id\x18\x02 \x01(\t\x12\x1a\n\x12\x63loud_service_type\x18\x03 \x01(\t\x12\x1b\n\x13\x63loud_service_group\x18\x04 \x01(\t\x12\x10\n\x08provider\x18\x05 \x01(\t\x12\r\n\x05state\x18\x06 \x01(\t\x12\x13\n\x0bregion_code\x18\x07 \x01(\t\x12\x0c\n\x04name\x18\x08 \x01(\t\x12\x19\n\x11resource_group_id\x18\x15 \x01(\t\x12\x12\n\nproject_id\x18\x16 \x01(\t\x12\x11\n\tdomain_id\x18\x17 \x01(\t\"\xfc\x03\n\x10\x43loudServiceInfo\x12\x18\n\x10\x63loud_service_id\x18\x01 \x01(\t\x12\x1a\n\x12\x63loud_service_type\x18\x02 \x01(\t\x12\x10\n\x08provider\x18\x03 \x01(\t\x12\x1b\n\x13\x63loud_service_group\x18\x04 \x01(\t\x12\r\n\x05state\x18\x05 \x01(\t\x12\x0c\n\x04name\x18\x06 \x01(\t\x12%\n\x04\x64\x61ta\x18\x0b \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08metadata\x18\x0c \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x43\n\treference\x18\r \x01(\x0b\x32\x30.spaceone.api.inventory.v1.CloudServiceReference\x12%\n\x04tags\x18\x0e \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x30\n\x0f\x63ollection_info\x18\x0f \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x12\n\nproject_id\x18\x16 \x01(\t\x12\x11\n\tdomain_id\x18\x17 \x01(\t\x12\x13\n\x0bregion_code\x18\x18 \x01(\t\x12\x12\n\ncreated_at\x18\x1f \x01(\t\x12\x12\n\nupdated_at\x18 \x01(\t\x12\x12\n\ndeleted_at\x18! 
\x01(\t\"f\n\x11\x43loudServicesInfo\x12<\n\x07results\x18\x01 \x03(\x0b\x32+.spaceone.api.inventory.v1.CloudServiceInfo\x12\x13\n\x0btotal_count\x18\x02 \x01(\x05\"{\n\x15\x43loudServiceStatQuery\x12\x34\n\x05query\x18\x01 \x01(\x0b\x32%.spaceone.api.core.v1.StatisticsQuery\x12\x11\n\tdomain_id\x18\x02 \x01(\t\x12\x19\n\x11resource_group_id\x18\x03 \x01(\t2\xd0\x08\n\x0c\x43loudService\x12\x8c\x01\n\x06\x63reate\x12/.spaceone.api.inventory.v1.CreateServiceRequest\x1a+.spaceone.api.inventory.v1.CloudServiceInfo\"$\x82\xd3\xe4\x93\x02\x1e\"\x1c/inventory/v1/cloud-services\x12\xa3\x01\n\x06update\x12\x34.spaceone.api.inventory.v1.UpdateCloudServiceRequest\x1a+.spaceone.api.inventory.v1.CloudServiceInfo\"6\x82\xd3\xe4\x93\x02\x30\x1a./inventory/v1/cloud-service/{cloud_service_id}\x12\xaf\x01\n\x08pin_data\x12\x35.spaceone.api.inventory.v1.PinCloudServiceDataRequest\x1a+.spaceone.api.inventory.v1.CloudServiceInfo\"?\x82\xd3\xe4\x93\x02\x39\x1a\x37/inventory/v1/cloud-service/{cloud_service_id}/pin-data\x12\x88\x01\n\x06\x64\x65lete\x12..spaceone.api.inventory.v1.CloudServiceRequest\x1a\x16.google.protobuf.Empty\"6\x82\xd3\xe4\x93\x02\x30*./inventory/v1/cloud-service/{cloud_service_id}\x12\x9d\x01\n\x03get\x12\x31.spaceone.api.inventory.v1.GetCloudServiceRequest\x1a+.spaceone.api.inventory.v1.CloudServiceInfo\"6\x82\xd3\xe4\x93\x02\x30\x12./inventory/v1/cloud-service/{cloud_service_id}\x12\xaf\x01\n\x04list\x12,.spaceone.api.inventory.v1.CloudServiceQuery\x1a,.spaceone.api.inventory.v1.CloudServicesInfo\"K\x82\xd3\xe4\x93\x02\x45\x12\x1c/inventory/v1/cloud-servicesZ%\"#/inventory/v1/cloud-services/search\x12|\n\x04stat\x12\x30.spaceone.api.inventory.v1.CloudServiceStatQuery\x1a\x17.google.protobuf.Struct\")\x82\xd3\xe4\x93\x02#\"!/inventory/v1/cloud-services/statb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,spaceone_dot_api_dot_core_dot_v1_dot_query__pb2.DESCRIPTOR,])
_CLOUDSERVICEREFERENCE = _descriptor.Descriptor(
name='CloudServiceReference',
full_name='spaceone.api.inventory.v1.CloudServiceReference',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_id', full_name='spaceone.api.inventory.v1.CloudServiceReference.resource_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_link', full_name='spaceone.api.inventory.v1.CloudServiceReference.external_link', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=199,
serialized_end=266,
)
_CREATESERVICEREQUEST = _descriptor.Descriptor(
name='CreateServiceRequest',
full_name='spaceone.api.inventory.v1.CreateServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_type', full_name='spaceone.api.inventory.v1.CreateServiceRequest.cloud_service_type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='spaceone.api.inventory.v1.CreateServiceRequest.provider', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_group', full_name='spaceone.api.inventory.v1.CreateServiceRequest.cloud_service_group', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.inventory.v1.CreateServiceRequest.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='spaceone.api.inventory.v1.CreateServiceRequest.data', index=4,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='spaceone.api.inventory.v1.CreateServiceRequest.metadata', index=5,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reference', full_name='spaceone.api.inventory.v1.CreateServiceRequest.reference', index=6,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.inventory.v1.CreateServiceRequest.tags', index=7,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.inventory.v1.CreateServiceRequest.project_id', index=8,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.CreateServiceRequest.domain_id', index=9,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region_code', full_name='spaceone.api.inventory.v1.CreateServiceRequest.region_code', index=10,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=269,
serialized_end=630,
)
_UPDATECLOUDSERVICEREQUEST = _descriptor.Descriptor(
name='UpdateCloudServiceRequest',
full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.cloud_service_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.data', index=2,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.metadata', index=3,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reference', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.reference', index=4,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.tags', index=5,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region_code', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.region_code', index=6,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.project_id', index=7,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.domain_id', index=8,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='release_project', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.release_project', index=9,
number=30, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='release_region', full_name='spaceone.api.inventory.v1.UpdateCloudServiceRequest.release_region', index=10,
number=31, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=633,
serialized_end=999,
)
_PINCLOUDSERVICEDATAREQUEST = _descriptor.Descriptor(
name='PinCloudServiceDataRequest',
full_name='spaceone.api.inventory.v1.PinCloudServiceDataRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.PinCloudServiceDataRequest.cloud_service_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='keys', full_name='spaceone.api.inventory.v1.PinCloudServiceDataRequest.keys', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.PinCloudServiceDataRequest.domain_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1001,
serialized_end=1088,
)
_CLOUDSERVICEREQUEST = _descriptor.Descriptor(
name='CloudServiceRequest',
full_name='spaceone.api.inventory.v1.CloudServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.CloudServiceRequest.cloud_service_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.CloudServiceRequest.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1090,
serialized_end=1156,
)
_GETCLOUDSERVICEREQUEST = _descriptor.Descriptor(
name='GetCloudServiceRequest',
full_name='spaceone.api.inventory.v1.GetCloudServiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.GetCloudServiceRequest.cloud_service_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.GetCloudServiceRequest.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='only', full_name='spaceone.api.inventory.v1.GetCloudServiceRequest.only', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1158,
serialized_end=1241,
)
_CLOUDSERVICEQUERY = _descriptor.Descriptor(
name='CloudServiceQuery',
full_name='spaceone.api.inventory.v1.CloudServiceQuery',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='spaceone.api.inventory.v1.CloudServiceQuery.query', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.CloudServiceQuery.cloud_service_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_type', full_name='spaceone.api.inventory.v1.CloudServiceQuery.cloud_service_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_group', full_name='spaceone.api.inventory.v1.CloudServiceQuery.cloud_service_group', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='spaceone.api.inventory.v1.CloudServiceQuery.provider', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='spaceone.api.inventory.v1.CloudServiceQuery.state', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region_code', full_name='spaceone.api.inventory.v1.CloudServiceQuery.region_code', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.inventory.v1.CloudServiceQuery.name', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.inventory.v1.CloudServiceQuery.resource_group_id', index=8,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.inventory.v1.CloudServiceQuery.project_id', index=9,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.CloudServiceQuery.domain_id', index=10,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1244,
serialized_end=1524,
)
_CLOUDSERVICEINFO = _descriptor.Descriptor(
name='CloudServiceInfo',
full_name='spaceone.api.inventory.v1.CloudServiceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cloud_service_id', full_name='spaceone.api.inventory.v1.CloudServiceInfo.cloud_service_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_type', full_name='spaceone.api.inventory.v1.CloudServiceInfo.cloud_service_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='provider', full_name='spaceone.api.inventory.v1.CloudServiceInfo.provider', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cloud_service_group', full_name='spaceone.api.inventory.v1.CloudServiceInfo.cloud_service_group', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='spaceone.api.inventory.v1.CloudServiceInfo.state', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='spaceone.api.inventory.v1.CloudServiceInfo.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='spaceone.api.inventory.v1.CloudServiceInfo.data', index=6,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='spaceone.api.inventory.v1.CloudServiceInfo.metadata', index=7,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reference', full_name='spaceone.api.inventory.v1.CloudServiceInfo.reference', index=8,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tags', full_name='spaceone.api.inventory.v1.CloudServiceInfo.tags', index=9,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='collection_info', full_name='spaceone.api.inventory.v1.CloudServiceInfo.collection_info', index=10,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='spaceone.api.inventory.v1.CloudServiceInfo.project_id', index=11,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.CloudServiceInfo.domain_id', index=12,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region_code', full_name='spaceone.api.inventory.v1.CloudServiceInfo.region_code', index=13,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='spaceone.api.inventory.v1.CloudServiceInfo.created_at', index=14,
number=31, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='spaceone.api.inventory.v1.CloudServiceInfo.updated_at', index=15,
number=32, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deleted_at', full_name='spaceone.api.inventory.v1.CloudServiceInfo.deleted_at', index=16,
number=33, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1527,
serialized_end=2035,
)
_CLOUDSERVICESINFO = _descriptor.Descriptor(
name='CloudServicesInfo',
full_name='spaceone.api.inventory.v1.CloudServicesInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='results', full_name='spaceone.api.inventory.v1.CloudServicesInfo.results', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total_count', full_name='spaceone.api.inventory.v1.CloudServicesInfo.total_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2037,
serialized_end=2139,
)
_CLOUDSERVICESTATQUERY = _descriptor.Descriptor(
name='CloudServiceStatQuery',
full_name='spaceone.api.inventory.v1.CloudServiceStatQuery',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='spaceone.api.inventory.v1.CloudServiceStatQuery.query', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='domain_id', full_name='spaceone.api.inventory.v1.CloudServiceStatQuery.domain_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_group_id', full_name='spaceone.api.inventory.v1.CloudServiceStatQuery.resource_group_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2141,
serialized_end=2264,
)
_CREATESERVICEREQUEST.fields_by_name['data'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CREATESERVICEREQUEST.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CREATESERVICEREQUEST.fields_by_name['reference'].message_type = _CLOUDSERVICEREFERENCE
_CREATESERVICEREQUEST.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATECLOUDSERVICEREQUEST.fields_by_name['data'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATECLOUDSERVICEREQUEST.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_UPDATECLOUDSERVICEREQUEST.fields_by_name['reference'].message_type = _CLOUDSERVICEREFERENCE
_UPDATECLOUDSERVICEREQUEST.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CLOUDSERVICEQUERY.fields_by_name['query'].message_type = spaceone_dot_api_dot_core_dot_v1_dot_query__pb2._QUERY
_CLOUDSERVICEINFO.fields_by_name['data'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CLOUDSERVICEINFO.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CLOUDSERVICEINFO.fields_by_name['reference'].message_type = _CLOUDSERVICEREFERENCE
_CLOUDSERVICEINFO.fields_by_name['tags'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CLOUDSERVICEINFO.fields_by_name['collection_info'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_CLOUDSERVICESINFO.fields_by_name['results'].message_type = _CLOUDSERVICEINFO
_CLOUDSERVICESTATQUERY.fields_by_name['query'].message_type = spaceone_dot_api_dot_core_dot_v1_dot_query__pb2._STATISTICSQUERY
DESCRIPTOR.message_types_by_name['CloudServiceReference'] = _CLOUDSERVICEREFERENCE
DESCRIPTOR.message_types_by_name['CreateServiceRequest'] = _CREATESERVICEREQUEST
DESCRIPTOR.message_types_by_name['UpdateCloudServiceRequest'] = _UPDATECLOUDSERVICEREQUEST
DESCRIPTOR.message_types_by_name['PinCloudServiceDataRequest'] = _PINCLOUDSERVICEDATAREQUEST
DESCRIPTOR.message_types_by_name['CloudServiceRequest'] = _CLOUDSERVICEREQUEST
DESCRIPTOR.message_types_by_name['GetCloudServiceRequest'] = _GETCLOUDSERVICEREQUEST
DESCRIPTOR.message_types_by_name['CloudServiceQuery'] = _CLOUDSERVICEQUERY
DESCRIPTOR.message_types_by_name['CloudServiceInfo'] = _CLOUDSERVICEINFO
DESCRIPTOR.message_types_by_name['CloudServicesInfo'] = _CLOUDSERVICESINFO
DESCRIPTOR.message_types_by_name['CloudServiceStatQuery'] = _CLOUDSERVICESTATQUERY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CloudServiceReference = _reflection.GeneratedProtocolMessageType('CloudServiceReference', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICEREFERENCE,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServiceReference)
})
_sym_db.RegisterMessage(CloudServiceReference)
CreateServiceRequest = _reflection.GeneratedProtocolMessageType('CreateServiceRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATESERVICEREQUEST,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CreateServiceRequest)
})
_sym_db.RegisterMessage(CreateServiceRequest)
UpdateCloudServiceRequest = _reflection.GeneratedProtocolMessageType('UpdateCloudServiceRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATECLOUDSERVICEREQUEST,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.UpdateCloudServiceRequest)
})
_sym_db.RegisterMessage(UpdateCloudServiceRequest)
PinCloudServiceDataRequest = _reflection.GeneratedProtocolMessageType('PinCloudServiceDataRequest', (_message.Message,), {
'DESCRIPTOR' : _PINCLOUDSERVICEDATAREQUEST,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.PinCloudServiceDataRequest)
})
_sym_db.RegisterMessage(PinCloudServiceDataRequest)
CloudServiceRequest = _reflection.GeneratedProtocolMessageType('CloudServiceRequest', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICEREQUEST,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServiceRequest)
})
_sym_db.RegisterMessage(CloudServiceRequest)
GetCloudServiceRequest = _reflection.GeneratedProtocolMessageType('GetCloudServiceRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCLOUDSERVICEREQUEST,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.GetCloudServiceRequest)
})
_sym_db.RegisterMessage(GetCloudServiceRequest)
CloudServiceQuery = _reflection.GeneratedProtocolMessageType('CloudServiceQuery', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICEQUERY,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServiceQuery)
})
_sym_db.RegisterMessage(CloudServiceQuery)
CloudServiceInfo = _reflection.GeneratedProtocolMessageType('CloudServiceInfo', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICEINFO,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServiceInfo)
})
_sym_db.RegisterMessage(CloudServiceInfo)
CloudServicesInfo = _reflection.GeneratedProtocolMessageType('CloudServicesInfo', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICESINFO,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServicesInfo)
})
_sym_db.RegisterMessage(CloudServicesInfo)
CloudServiceStatQuery = _reflection.GeneratedProtocolMessageType('CloudServiceStatQuery', (_message.Message,), {
'DESCRIPTOR' : _CLOUDSERVICESTATQUERY,
'__module__' : 'spaceone.api.inventory.v1.cloud_service_pb2'
# @@protoc_insertion_point(class_scope:spaceone.api.inventory.v1.CloudServiceStatQuery)
})
_sym_db.RegisterMessage(CloudServiceStatQuery)
_CLOUDSERVICE = _descriptor.ServiceDescriptor(
name='CloudService',
full_name='spaceone.api.inventory.v1.CloudService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2267,
serialized_end=3371,
methods=[
_descriptor.MethodDescriptor(
name='create',
full_name='spaceone.api.inventory.v1.CloudService.create',
index=0,
containing_service=None,
input_type=_CREATESERVICEREQUEST,
output_type=_CLOUDSERVICEINFO,
serialized_options=b'\202\323\344\223\002\036\"\034/inventory/v1/cloud-services',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='update',
full_name='spaceone.api.inventory.v1.CloudService.update',
index=1,
containing_service=None,
input_type=_UPDATECLOUDSERVICEREQUEST,
output_type=_CLOUDSERVICEINFO,
serialized_options=b'\202\323\344\223\0020\032./inventory/v1/cloud-service/{cloud_service_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='pin_data',
full_name='spaceone.api.inventory.v1.CloudService.pin_data',
index=2,
containing_service=None,
input_type=_PINCLOUDSERVICEDATAREQUEST,
output_type=_CLOUDSERVICEINFO,
serialized_options=b'\202\323\344\223\0029\0327/inventory/v1/cloud-service/{cloud_service_id}/pin-data',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='delete',
full_name='spaceone.api.inventory.v1.CloudService.delete',
index=3,
containing_service=None,
input_type=_CLOUDSERVICEREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\0020*./inventory/v1/cloud-service/{cloud_service_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='get',
full_name='spaceone.api.inventory.v1.CloudService.get',
index=4,
containing_service=None,
input_type=_GETCLOUDSERVICEREQUEST,
output_type=_CLOUDSERVICEINFO,
serialized_options=b'\202\323\344\223\0020\022./inventory/v1/cloud-service/{cloud_service_id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='list',
full_name='spaceone.api.inventory.v1.CloudService.list',
index=5,
containing_service=None,
input_type=_CLOUDSERVICEQUERY,
output_type=_CLOUDSERVICESINFO,
serialized_options=b'\202\323\344\223\002E\022\034/inventory/v1/cloud-servicesZ%\"#/inventory/v1/cloud-services/search',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='stat',
full_name='spaceone.api.inventory.v1.CloudService.stat',
index=6,
containing_service=None,
input_type=_CLOUDSERVICESTATQUERY,
output_type=google_dot_protobuf_dot_struct__pb2._STRUCT,
serialized_options=b'\202\323\344\223\002#\"!/inventory/v1/cloud-services/stat',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_CLOUDSERVICE)
DESCRIPTOR.services_by_name['CloudService'] = _CLOUDSERVICE
# @@protoc_insertion_point(module_scope)
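# --- Illustrative usage sketch (not emitted by protoc; added for clarity) ---
# The _reflection calls above register concrete message classes at module
# scope, so importers of this module can build and serialize requests
# directly. The field names used below ('data', 'tags') are taken from the
# fields_by_name wiring earlier in this file; the gRPC service stub itself
# lives in the separately generated *_pb2_grpc module, not here.
request = CreateServiceRequest()
request.data.update({'cpu': 2, 'memory': 4096})    # 'data' is a google.protobuf.Struct
request.tags.update({'env': 'dev'})                # 'tags' is a Struct as well
wire_bytes = request.SerializeToString()           # protobuf wire format
roundtrip = CreateServiceRequest.FromString(wire_bytes)
assert roundtrip.data['cpu'] == 2                  # Struct numbers round-trip as doubles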
| 54.066089
| 5,243
| 0.769373
| 6,470
| 49,903
| 5.626275
| 0.048223
| 0.045052
| 0.075683
| 0.074337
| 0.831877
| 0.796879
| 0.787622
| 0.725867
| 0.699302
| 0.680127
| 0
| 0.042209
| 0.107929
| 49,903
| 922
| 5,244
| 54.124729
| 0.775501
| 0.021882
| 0
| 0.702202
| 1
| 0.008111
| 0.259269
| 0.225574
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00927
| 0
| 0.00927
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13b5ccd05f6c1d1c936ed0eb30ad6fa04036ac14
| 1,060
|
py
|
Python
|
python/src/tests/t_autoflush.py
|
atrsoftgmbh/atrshmlog
|
4ca1a2cc6ff26890a02d74db378e597353f197d3
|
[
"Apache-2.0"
] | null | null | null |
python/src/tests/t_autoflush.py
|
atrsoftgmbh/atrshmlog
|
4ca1a2cc6ff26890a02d74db378e597353f197d3
|
[
"Apache-2.0"
] | null | null | null |
python/src/tests/t_autoflush.py
|
atrsoftgmbh/atrshmlog
|
4ca1a2cc6ff26890a02d74db378e597353f197d3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
#
# $Id:$
#
# We exercise a small part of the atrshmlog module here.
#
# This is a first, minimal test, so it covers only the basics.
import sys
import atrshmlog
r = atrshmlog.attach()
print('attach : ' + str(r) + ' : ')
id = atrshmlog.get_autoflush()
print('autoflush : ' + str(id) + ' : ')
oldid = atrshmlog.set_autoflush(1)
print('autoflush : ' + str(oldid) + ' : ')
id = atrshmlog.get_autoflush()
print('autoflush : ' + str(id) + ' : ')
oldid = atrshmlog.set_autoflush(0)
print('autoflush : ' + str(oldid) + ' : ')
id = atrshmlog.get_autoflush()
print('autoflush : ' + str(id) + ' : ')
id = atrshmlog.get_autoflush_process()
print('autoflush : ' + str(id) + ' : ')
oldid = atrshmlog.set_autoflush_process(1)
print('autoflush : ' + str(oldid) + ' : ')
id = atrshmlog.get_autoflush_process()
print('autoflush : ' + str(id) + ' : ')
oldid = atrshmlog.set_autoflush_process(0)
print('autoflush : ' + str(oldid) + ' : ')
id = atrshmlog.get_autoflush_process()
print('autoflush : ' + str(id) + ' : ')
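# --- Optional helper (illustrative sketch, not part of the original test) ---
# The calls above toggle the autoflush flags by hand; the same
# get_autoflush()/set_autoflush() pair can be wrapped in a small context
# manager that restores the previous value automatically. The name
# autoflush_enabled is hypothetical and uses only functions exercised above.
import contextlib

@contextlib.contextmanager
def autoflush_enabled(value=1):
    previous = atrshmlog.get_autoflush()      # remember the current setting
    atrshmlog.set_autoflush(value)
    try:
        yield
    finally:
        atrshmlog.set_autoflush(previous)     # restore it on exit

with autoflush_enabled(1):
    print('autoflush inside block : ' + str(atrshmlog.get_autoflush()) + ' : ')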
print(' ')
sys.exit(0)
# end of test
| 16.5625
| 58
| 0.621698
| 129
| 1,060
| 4.992248
| 0.27907
| 0.217391
| 0.263975
| 0.214286
| 0.75
| 0.75
| 0.75
| 0.75
| 0.75
| 0.743789
| 0
| 0.006977
| 0.188679
| 1,060
| 63
| 59
| 16.825397
| 0.74186
| 0.121698
| 0
| 0.615385
| 0
| 0
| 0.17679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.461538
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
b91f2d3fddc4918458d2f820bef661269ede4812
| 24,690
|
py
|
Python
|
sdk/python/pulumi_scaleway/loadbalancer_frontend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2020-10-16T09:09:05.000Z
|
2022-03-24T21:32:17.000Z
|
sdk/python/pulumi_scaleway/loadbalancer_frontend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2020-10-29T17:38:13.000Z
|
2022-03-31T13:33:47.000Z
|
sdk/python/pulumi_scaleway/loadbalancer_frontend.py
|
stack72/pulumi-scaleway
|
0242d1f058046f86fe4ea6f106872ecd08d10c3b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2022-01-13T18:46:32.000Z
|
2022-02-28T03:58:36.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['LoadbalancerFrontendArgs', 'LoadbalancerFrontend']
@pulumi.input_type
class LoadbalancerFrontendArgs:
def __init__(__self__, *,
backend_id: pulumi.Input[str],
inbound_port: pulumi.Input[int],
lb_id: pulumi.Input[str],
acls: Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]] = None,
certificate_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
timeout_client: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a LoadbalancerFrontend resource.
:param pulumi.Input[str] backend_id: The load-balancer backend ID this frontend is attached to.
:param pulumi.Input[int] inbound_port: TCP port to listen on the front side.
:param pulumi.Input[str] lb_id: The load-balancer ID this frontend is attached to.
:param pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]] acls: A list of ACL rules to apply to the load-balancer frontend. Defined below.
:param pulumi.Input[str] certificate_id: Certificate ID that should be used by the frontend.
:param pulumi.Input[str] name: The ACL name. If not provided it will be randomly generated.
:param pulumi.Input[str] timeout_client: Maximum inactivity time on the client side. (e.g.: `1s`)
"""
pulumi.set(__self__, "backend_id", backend_id)
pulumi.set(__self__, "inbound_port", inbound_port)
pulumi.set(__self__, "lb_id", lb_id)
if acls is not None:
pulumi.set(__self__, "acls", acls)
if certificate_id is not None:
pulumi.set(__self__, "certificate_id", certificate_id)
if name is not None:
pulumi.set(__self__, "name", name)
if timeout_client is not None:
pulumi.set(__self__, "timeout_client", timeout_client)
@property
@pulumi.getter(name="backendId")
def backend_id(self) -> pulumi.Input[str]:
"""
The load-balancer backend ID this frontend is attached to.
"""
return pulumi.get(self, "backend_id")
@backend_id.setter
def backend_id(self, value: pulumi.Input[str]):
pulumi.set(self, "backend_id", value)
@property
@pulumi.getter(name="inboundPort")
def inbound_port(self) -> pulumi.Input[int]:
"""
TCP port to listen on the front side.
"""
return pulumi.get(self, "inbound_port")
@inbound_port.setter
def inbound_port(self, value: pulumi.Input[int]):
pulumi.set(self, "inbound_port", value)
@property
@pulumi.getter(name="lbId")
def lb_id(self) -> pulumi.Input[str]:
"""
The load-balancer ID this frontend is attached to.
"""
return pulumi.get(self, "lb_id")
@lb_id.setter
def lb_id(self, value: pulumi.Input[str]):
pulumi.set(self, "lb_id", value)
@property
@pulumi.getter
def acls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]]:
"""
A list of ACL rules to apply to the load-balancer frontend. Defined below.
"""
return pulumi.get(self, "acls")
@acls.setter
def acls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]]):
pulumi.set(self, "acls", value)
@property
@pulumi.getter(name="certificateId")
def certificate_id(self) -> Optional[pulumi.Input[str]]:
"""
Certificate ID that should be used by the frontend.
"""
return pulumi.get(self, "certificate_id")
@certificate_id.setter
def certificate_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The ACL name. If not provided it will be randomly generated.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="timeoutClient")
def timeout_client(self) -> Optional[pulumi.Input[str]]:
"""
Maximum inactivity time on the client side. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_client")
@timeout_client.setter
def timeout_client(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timeout_client", value)
@pulumi.input_type
class _LoadbalancerFrontendState:
def __init__(__self__, *,
acls: Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]] = None,
backend_id: Optional[pulumi.Input[str]] = None,
certificate_id: Optional[pulumi.Input[str]] = None,
inbound_port: Optional[pulumi.Input[int]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
timeout_client: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering LoadbalancerFrontend resources.
:param pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]] acls: A list of ACL rules to apply to the load-balancer frontend. Defined below.
:param pulumi.Input[str] backend_id: The load-balancer backend ID this frontend is attached to.
:param pulumi.Input[str] certificate_id: Certificate ID that should be used by the frontend.
:param pulumi.Input[int] inbound_port: TCP port to listen on the front side.
:param pulumi.Input[str] lb_id: The load-balancer ID this frontend is attached to.
:param pulumi.Input[str] name: The ACL name. If not provided it will be randomly generated.
:param pulumi.Input[str] timeout_client: Maximum inactivity time on the client side. (e.g.: `1s`)
"""
if acls is not None:
pulumi.set(__self__, "acls", acls)
if backend_id is not None:
pulumi.set(__self__, "backend_id", backend_id)
if certificate_id is not None:
pulumi.set(__self__, "certificate_id", certificate_id)
if inbound_port is not None:
pulumi.set(__self__, "inbound_port", inbound_port)
if lb_id is not None:
pulumi.set(__self__, "lb_id", lb_id)
if name is not None:
pulumi.set(__self__, "name", name)
if timeout_client is not None:
pulumi.set(__self__, "timeout_client", timeout_client)
@property
@pulumi.getter
def acls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]]:
"""
A list of ACL rules to apply to the load-balancer frontend. Defined below.
"""
return pulumi.get(self, "acls")
@acls.setter
def acls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LoadbalancerFrontendAclArgs']]]]):
pulumi.set(self, "acls", value)
@property
@pulumi.getter(name="backendId")
def backend_id(self) -> Optional[pulumi.Input[str]]:
"""
The load-balancer backend ID this frontend is attached to.
"""
return pulumi.get(self, "backend_id")
@backend_id.setter
def backend_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "backend_id", value)
@property
@pulumi.getter(name="certificateId")
def certificate_id(self) -> Optional[pulumi.Input[str]]:
"""
Certificate ID that should be used by the frontend.
"""
return pulumi.get(self, "certificate_id")
@certificate_id.setter
def certificate_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate_id", value)
@property
@pulumi.getter(name="inboundPort")
def inbound_port(self) -> Optional[pulumi.Input[int]]:
"""
TCP port to listen on the front side.
"""
return pulumi.get(self, "inbound_port")
@inbound_port.setter
def inbound_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "inbound_port", value)
@property
@pulumi.getter(name="lbId")
def lb_id(self) -> Optional[pulumi.Input[str]]:
"""
The load-balancer ID this frontend is attached to.
"""
return pulumi.get(self, "lb_id")
@lb_id.setter
def lb_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lb_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The ACL name. If not provided it will be randomly generated.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="timeoutClient")
def timeout_client(self) -> Optional[pulumi.Input[str]]:
"""
Maximum inactivity time on the client side. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_client")
@timeout_client.setter
def timeout_client(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timeout_client", value)
class LoadbalancerFrontend(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadbalancerFrontendAclArgs']]]]] = None,
backend_id: Optional[pulumi.Input[str]] = None,
certificate_id: Optional[pulumi.Input[str]] = None,
inbound_port: Optional[pulumi.Input[int]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
timeout_client: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates and manages Scaleway Load-Balancer Frontends. For more information, see [the documentation](https://developers.scaleway.com/en/products/lb/zoned_api).
## Examples
### Basic
```python
import pulumi
import pulumi_scaleway as scaleway
frontend01 = scaleway.LoadbalancerFrontend("frontend01",
lb_id=scaleway_lb["lb01"]["id"],
backend_id=scaleway_lb_backend["backend01"]["id"],
inbound_port=80)
```
## With ACLs
```python
import pulumi
import pulumi_scaleway as scaleway
frontend01 = scaleway.LoadbalancerFrontend("frontend01",
lb_id=scaleway_lb["lb01"]["id"],
backend_id=scaleway_lb_backend["backend01"]["id"],
inbound_port=80,
acls=[
scaleway.LoadbalancerFrontendAclArgs(
name="blacklist wellknwon IPs",
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
ip_subnets=[
"192.168.0.1",
"192.168.0.2",
"192.168.10.0/24",
],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="deny",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
ip_subnets=["51.51.51.51"],
http_filter="regex",
http_filter_values=["^foo*bar$"],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
http_filter="path_begin",
http_filter_values=[
"foo",
"bar",
],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
http_filter="path_begin",
http_filter_values=["hi"],
invert=True,
),
),
])
```
## Import
Load-Balancer frontends can be imported using `{zone}/{id}`, e.g.:
```sh
$ pulumi import scaleway:index/loadbalancerFrontend:LoadbalancerFrontend frontend01 fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadbalancerFrontendAclArgs']]]] acls: A list of ACL rules to apply to the load-balancer frontend. Defined below.
:param pulumi.Input[str] backend_id: The load-balancer backend ID this frontend is attached to.
:param pulumi.Input[str] certificate_id: Certificate ID that should be used by the frontend.
:param pulumi.Input[int] inbound_port: TCP port to listen on the front side.
:param pulumi.Input[str] lb_id: The load-balancer ID this frontend is attached to.
:param pulumi.Input[str] name: The ACL name. If not provided it will be randomly generated.
:param pulumi.Input[str] timeout_client: Maximum inactivity time on the client side. (e.g.: `1s`)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: LoadbalancerFrontendArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates and manages Scaleway Load-Balancer Frontends. For more information, see [the documentation](https://developers.scaleway.com/en/products/lb/zoned_api).
## Examples
### Basic
```python
import pulumi
import pulumi_scaleway as scaleway
frontend01 = scaleway.LoadbalancerFrontend("frontend01",
lb_id=scaleway_lb["lb01"]["id"],
backend_id=scaleway_lb_backend["backend01"]["id"],
inbound_port=80)
```
## With ACLs
```python
import pulumi
import pulumi_scaleway as scaleway
frontend01 = scaleway.LoadbalancerFrontend("frontend01",
lb_id=scaleway_lb["lb01"]["id"],
backend_id=scaleway_lb_backend["backend01"]["id"],
inbound_port=80,
acls=[
scaleway.LoadbalancerFrontendAclArgs(
name="blacklist wellknwon IPs",
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
ip_subnets=[
"192.168.0.1",
"192.168.0.2",
"192.168.10.0/24",
],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="deny",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
ip_subnets=["51.51.51.51"],
http_filter="regex",
http_filter_values=["^foo*bar$"],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
http_filter="path_begin",
http_filter_values=[
"foo",
"bar",
],
),
),
scaleway.LoadbalancerFrontendAclArgs(
action=scaleway.LoadbalancerFrontendAclActionArgs(
type="allow",
),
match=scaleway.LoadbalancerFrontendAclMatchArgs(
http_filter="path_begin",
http_filter_values=["hi"],
invert=True,
),
),
])
```
## Import
Load-Balancer frontends can be imported using `{zone}/{id}`, e.g.:
```sh
$ pulumi import scaleway:index/loadbalancerFrontend:LoadbalancerFrontend frontend01 fr-par-1/11111111-1111-1111-1111-111111111111
```
:param str resource_name: The name of the resource.
:param LoadbalancerFrontendArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(LoadbalancerFrontendArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadbalancerFrontendAclArgs']]]]] = None,
backend_id: Optional[pulumi.Input[str]] = None,
certificate_id: Optional[pulumi.Input[str]] = None,
inbound_port: Optional[pulumi.Input[int]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
timeout_client: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = LoadbalancerFrontendArgs.__new__(LoadbalancerFrontendArgs)
__props__.__dict__["acls"] = acls
if backend_id is None and not opts.urn:
raise TypeError("Missing required property 'backend_id'")
__props__.__dict__["backend_id"] = backend_id
__props__.__dict__["certificate_id"] = certificate_id
if inbound_port is None and not opts.urn:
raise TypeError("Missing required property 'inbound_port'")
__props__.__dict__["inbound_port"] = inbound_port
if lb_id is None and not opts.urn:
raise TypeError("Missing required property 'lb_id'")
__props__.__dict__["lb_id"] = lb_id
__props__.__dict__["name"] = name
__props__.__dict__["timeout_client"] = timeout_client
super(LoadbalancerFrontend, __self__).__init__(
'scaleway:index/loadbalancerFrontend:LoadbalancerFrontend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
acls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadbalancerFrontendAclArgs']]]]] = None,
backend_id: Optional[pulumi.Input[str]] = None,
certificate_id: Optional[pulumi.Input[str]] = None,
inbound_port: Optional[pulumi.Input[int]] = None,
lb_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
timeout_client: Optional[pulumi.Input[str]] = None) -> 'LoadbalancerFrontend':
"""
Get an existing LoadbalancerFrontend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadbalancerFrontendAclArgs']]]] acls: A list of ACL rules to apply to the load-balancer frontend. Defined below.
:param pulumi.Input[str] backend_id: The load-balancer backend ID this frontend is attached to.
:param pulumi.Input[str] certificate_id: Certificate ID that should be used by the frontend.
:param pulumi.Input[int] inbound_port: TCP port to listen on the front side.
:param pulumi.Input[str] lb_id: The load-balancer ID this frontend is attached to.
:param pulumi.Input[str] name: The ACL name. If not provided it will be randomly generated.
:param pulumi.Input[str] timeout_client: Maximum inactivity time on the client side. (e.g.: `1s`)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _LoadbalancerFrontendState.__new__(_LoadbalancerFrontendState)
__props__.__dict__["acls"] = acls
__props__.__dict__["backend_id"] = backend_id
__props__.__dict__["certificate_id"] = certificate_id
__props__.__dict__["inbound_port"] = inbound_port
__props__.__dict__["lb_id"] = lb_id
__props__.__dict__["name"] = name
__props__.__dict__["timeout_client"] = timeout_client
return LoadbalancerFrontend(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def acls(self) -> pulumi.Output[Optional[Sequence['outputs.LoadbalancerFrontendAcl']]]:
"""
A list of ACL rules to apply to the load-balancer frontend. Defined below.
"""
return pulumi.get(self, "acls")
@property
@pulumi.getter(name="backendId")
def backend_id(self) -> pulumi.Output[str]:
"""
The load-balancer backend ID this frontend is attached to.
"""
return pulumi.get(self, "backend_id")
@property
@pulumi.getter(name="certificateId")
def certificate_id(self) -> pulumi.Output[Optional[str]]:
"""
Certificate ID that should be used by the frontend.
"""
return pulumi.get(self, "certificate_id")
@property
@pulumi.getter(name="inboundPort")
def inbound_port(self) -> pulumi.Output[int]:
"""
TCP port to listen on the front side.
"""
return pulumi.get(self, "inbound_port")
@property
@pulumi.getter(name="lbId")
def lb_id(self) -> pulumi.Output[str]:
"""
The load-balancer ID this frontend is attached to.
"""
return pulumi.get(self, "lb_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The ACL name. If not provided it will be randomly generated.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="timeoutClient")
def timeout_client(self) -> pulumi.Output[Optional[str]]:
"""
Maximum inactivity time on the client side. (e.g.: `1s`)
"""
return pulumi.get(self, "timeout_client")
| 41.356784
| 182
| 0.598947
| 2,628
| 24,690
| 5.42618
| 0.088661
| 0.08331
| 0.065778
| 0.060168
| 0.862272
| 0.847756
| 0.838429
| 0.823913
| 0.810168
| 0.791234
| 0
| 0.010833
| 0.297124
| 24,690
| 596
| 183
| 41.426175
| 0.810879
| 0.403362
| 0
| 0.724528
| 1
| 0
| 0.108608
| 0.027881
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158491
| false
| 0.003774
| 0.026415
| 0
| 0.279245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b92a0b1f3222c8e2c4a8f441f7c23b827da34ed9
| 38,453
|
py
|
Python
|
tests/mock_github.py
|
mogul/github-issue-lifecycle
|
c31a753b904799c57a7468bf590a280e8be3bb6f
|
[
"CC0-1.0"
] | 1
|
2017-06-08T11:37:21.000Z
|
2017-06-08T11:37:21.000Z
|
tests/mock_github.py
|
mogul/github-issue-lifecycle
|
c31a753b904799c57a7468bf590a280e8be3bb6f
|
[
"CC0-1.0"
] | 2
|
2016-10-20T20:39:17.000Z
|
2016-10-20T20:45:50.000Z
|
tests/mock_github.py
|
mogul/github-issue-lifecycle
|
c31a753b904799c57a7468bf590a280e8be3bb6f
|
[
"CC0-1.0"
] | 3
|
2016-10-20T20:32:06.000Z
|
2021-02-15T10:00:02.000Z
|
import re
import requests
events_url_pattern = re.compile(r'issues/(\d+)/events')
def requests_get_stub(*args, **kwargs):
response = requests.get.return_value
if '/doesnot/exist/' in args[0]:
response.status_code = 404
response.ok = False
response.text = 'Repo not found'
return response
events_for_issue_num = events_url_pattern.search(args[0])
if events_for_issue_num:
issue_num = int(events_for_issue_num.group(1))
response.json.return_value = events_json[issue_num]
else:
response.json.return_value = issues_json
response.ok = True
response.status_code = 200
return response
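# --- Usage sketch (added; not part of the original fixture) ---
# requests_get_stub reads `requests.get.return_value`, so it is intended to be
# installed as the side_effect of an already-mocked requests.get. The helper
# below is hypothetical and shows one way to wire it up with the standard
# library's unittest.mock; it is deferred into a function so that issues_json
# and events_json (defined further down in this module) resolve at call time.
def _example_stub_usage():
    from unittest import mock
    with mock.patch.object(requests, 'get') as mocked_get:
        mocked_get.side_effect = requests_get_stub
        resp = requests.get('https://api.github.com/repos/18F/blog-drafts/issues/4/events')
        assert resp.ok and resp.json() == events_json[4]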
issues_json = [
{'assignee':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'body': '# 29 minutes\r\n'
'*From: Aaron Snow*\r\n'
'\r\n'
'I want to relate a great little thing that happened here at [18th '
'and F](http://gsa.gov/) yesterday.\r\n',
'closed_at': '2014-03-26T04:01:54Z',
'comments': 3,
'comments_url':
'https://api.github.com/repos/18F/blog-drafts/issues/4/comments',
'created_at': '2014-03-25T07:07:18Z',
'events_url':
'https://api.github.com/repos/18F/blog-drafts/issues/4/events',
'html_url': 'https://github.com/18F/blog-drafts/issues/4',
'id': 30101124,
'labels': [
{'color': 'bfe5bf',
'name': 'how we work',
'url':
'https://api.github.com/repos/18F/blog-drafts/labels/how%20we%20work'
}
],
'labels_url':
'https://api.github.com/repos/18F/blog-drafts/issues/4/labels{/name}',
'locked': False,
'milestone':
{'closed_at': None,
'closed_issues': 140,
'created_at': '2014-03-25T06:52:47Z',
'creator':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'description': 'posts that have been written, edited, and '
'approved for posting',
'due_on': None,
'html_url': 'https://github.com/18F/blog-drafts/milestones/approved',
'id': 608934,
'labels_url':
'https://api.github.com/repos/18F/blog-drafts/milestones/2/labels',
'number': 2,
'open_issues': 0,
'state': 'open',
'title': 'approved',
'updated_at': '2015-12-09T16:48:40Z',
'url': 'https://api.github.com/repos/18F/blog-drafts/milestones/2'},
'number': 4,
'state': 'closed',
'title': 'Story about FBOpen and m.gsa.gov',
'updated_at': '2014-03-29T05:21:10Z',
'url': 'https://api.github.com/repos/18F/blog-drafts/issues/4',
'user':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'}},
{'assignee':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'body':
'Blog post about the Github AMA this week. Got questions about 18F '
'and the PIF program? Come ask us!',
'closed_at': '2014-04-01T20:00:44Z',
'comments': 2,
'comments_url':
'https://api.github.com/repos/18F/blog-drafts/issues/17/comments',
'created_at': '2014-04-01T18:43:14Z',
'events_url':
'https://api.github.com/repos/18F/blog-drafts/issues/17/events',
'html_url': 'https://github.com/18F/blog-drafts/issues/17',
'id': 30625911,
'labels': [],
'labels_url':
'https://api.github.com/repos/18F/blog-drafts/issues/17/labels{/name}',
'locked': False,
'milestone':
{'closed_at': None,
'closed_issues': 140,
'created_at': '2014-03-25T06:52:47Z',
'creator':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'description': 'posts that have been written, edited, and '
'approved for posting',
'due_on': None,
'html_url': 'https://github.com/18F/blog-drafts/milestones/approved',
'id': 608934,
'labels_url':
'https://api.github.com/repos/18F/blog-drafts/milestones/2/labels',
'number': 2,
'open_issues': 0,
'state': 'open',
'title': 'approved',
'updated_at': '2015-12-09T16:48:40Z',
'url': 'https://api.github.com/repos/18F/blog-drafts/milestones/2'},
'number': 17,
'state': 'closed',
'title': 'Github AMA',
'updated_at': '2014-04-01T20:00:44Z',
'url': 'https://api.github.com/repos/18F/blog-drafts/issues/17',
'user':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'}}
]
events_json = {
4:
[{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-25T07:07:23Z',
'event': 'milestoned',
'id': 105106022,
'milestone': {'title': 'posted'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105106022'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-25T07:09:14Z',
'event': 'labeled',
'id': 105106349,
'label': {'color': 'f7c6c7',
'name': 'fbopen'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105106349'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/565152?v=3',
'events_url': 'https://api.github.com/users/aaronsnow/events{/privacy}',
'followers_url': 'https://api.github.com/users/aaronsnow/followers',
'following_url':
'https://api.github.com/users/aaronsnow/following{/other_user}',
'gists_url': 'https://api.github.com/users/aaronsnow/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/aaronsnow',
'id': 565152,
'login': 'aaronsnow',
'organizations_url': 'https://api.github.com/users/aaronsnow/orgs',
'received_events_url':
'https://api.github.com/users/aaronsnow/received_events',
'repos_url': 'https://api.github.com/users/aaronsnow/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/aaronsnow/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/aaronsnow/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/aaronsnow'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-25T20:18:21Z',
'event': 'closed',
'id': 105389265,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105389265'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1328699?v=3',
'events_url': 'https://api.github.com/users/amoose/events{/privacy}',
'followers_url': 'https://api.github.com/users/amoose/followers',
'following_url':
'https://api.github.com/users/amoose/following{/other_user}',
'gists_url': 'https://api.github.com/users/amoose/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/amoose',
'id': 1328699,
'login': 'amoose',
'organizations_url': 'https://api.github.com/users/amoose/orgs',
'received_events_url':
'https://api.github.com/users/amoose/received_events',
'repos_url': 'https://api.github.com/users/amoose/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/amoose/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/amoose/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/amoose'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T01:23:19Z',
'event': 'reopened',
'id': 105478781,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105478781'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'assignee':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T03:18:05Z',
'event': 'assigned',
'id': 105495794,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105495794'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T03:59:55Z',
'event': 'milestoned',
'id': 105501905,
'milestone': {'title': 'posted'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105501905'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T03:59:55Z',
'event': 'demilestoned',
'id': 105501906,
'milestone': {'title': 'posted'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105501906'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1328699?v=3',
'events_url': 'https://api.github.com/users/amoose/events{/privacy}',
'followers_url': 'https://api.github.com/users/amoose/followers',
'following_url':
'https://api.github.com/users/amoose/following{/other_user}',
'gists_url': 'https://api.github.com/users/amoose/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/amoose',
'id': 1328699,
'login': 'amoose',
'organizations_url': 'https://api.github.com/users/amoose/orgs',
'received_events_url':
'https://api.github.com/users/amoose/received_events',
'repos_url': 'https://api.github.com/users/amoose/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/amoose/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/amoose/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/amoose'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T04:01:54Z',
'event': 'mentioned',
'id': 105502170,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105502170'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1328699?v=3',
'events_url': 'https://api.github.com/users/amoose/events{/privacy}',
'followers_url': 'https://api.github.com/users/amoose/followers',
'following_url':
'https://api.github.com/users/amoose/following{/other_user}',
'gists_url': 'https://api.github.com/users/amoose/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/amoose',
'id': 1328699,
'login': 'amoose',
'organizations_url': 'https://api.github.com/users/amoose/orgs',
'received_events_url':
'https://api.github.com/users/amoose/received_events',
'repos_url': 'https://api.github.com/users/amoose/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/amoose/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/amoose/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/amoose'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T04:01:54Z',
'event': 'subscribed',
'id': 105502171,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105502171'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T04:01:54Z',
'event': 'closed',
'id': 105502172,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105502172'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-26T04:52:53Z',
'event': 'labeled',
'id': 105508695,
'label': {'color': '009800',
'name': 'how we work'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/105508695'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-29T05:19:07Z',
'event': 'demilestoned',
'id': 106705064,
'milestone': {'title': 'posted'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/106705064'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-03-29T05:21:10Z',
'event': 'milestoned',
'id': 106705168,
'milestone': {'title': 'approved'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/106705168'}],
17:
[{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T18:43:14Z',
'event': 'milestoned',
'id': 107595720,
'milestone': {'title': 'idea'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107595720'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'assignee':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T18:43:14Z',
'event': 'assigned',
'id': 107595721,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107595721'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T18:43:21Z',
'event': 'milestoned',
'id': 107595759,
'milestone': {'title': 'draft'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107595759'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T18:43:21Z',
'event': 'demilestoned',
'id': 107595760,
'milestone': {'title': 'idea'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107595760'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T20:00:27Z',
'event': 'milestoned',
'id': 107628074,
'milestone': {'title': 'approved'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107628074'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T20:00:27Z',
'event': 'demilestoned',
'id': 107628075,
'milestone': {'title': 'draft'},
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107628075'},
{'actor':
{'avatar_url': 'https://avatars.githubusercontent.com/u/1841120?v=3',
'events_url': 'https://api.github.com/users/quepol/events{/privacy}',
'followers_url': 'https://api.github.com/users/quepol/followers',
'following_url':
'https://api.github.com/users/quepol/following{/other_user}',
'gists_url': 'https://api.github.com/users/quepol/gists{/gist_id}',
'gravatar_id': '',
'html_url': 'https://github.com/quepol',
'id': 1841120,
'login': 'quepol',
'organizations_url': 'https://api.github.com/users/quepol/orgs',
'received_events_url':
'https://api.github.com/users/quepol/received_events',
'repos_url': 'https://api.github.com/users/quepol/repos',
'site_admin': False,
'starred_url':
'https://api.github.com/users/quepol/starred{/owner}{/repo}',
'subscriptions_url':
'https://api.github.com/users/quepol/subscriptions',
'type': 'User',
'url': 'https://api.github.com/users/quepol'},
'commit_id': None,
'commit_url': None,
'created_at': '2014-04-01T20:00:44Z',
'event': 'closed',
'id': 107628198,
'url':
'https://api.github.com/repos/18F/blog-drafts/issues/events/107628198'}]
}
| 43.449718
| 79
| 0.610928
| 4,649
| 38,453
| 4.92579
| 0.052484
| 0.130306
| 0.150349
| 0.232358
| 0.930873
| 0.930742
| 0.926201
| 0.920306
| 0.915328
| 0.906026
| 0
| 0.043767
| 0.188334
| 38,453
| 884
| 80
| 43.498869
| 0.689949
| 0
| 0
| 0.848
| 0
| 0.033143
| 0.675708
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001143
| false
| 0
| 0.002286
| 0
| 0.005714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b978ebef515116570b2de3de0b095c38e443e199
| 38,175
|
py
|
Python
|
src/layers.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/layers.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
src/layers.py
|
Sharut/Bilinear-Linformer
|
025e2ffefe8bb57092cd4a912ea66ec3d65997e0
|
[
"MIT"
] | null | null | null |
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2019 Apple Inc. All Rights Reserved.
#
'''Capsule in PyTorch
TBD
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import math
from .bilinear_sparse_routing import BilinearSparseRouting, BilinearRouting, DynamicBilinearRouting
from .linformer import LinformerProjectionKernel, BilinearProjectionWithEmbeddings
#### Simple Backbone ####
class simple_backbone(nn.Module):
def __init__(self, cl_input_channels,cl_num_filters,cl_filter_size,
cl_stride,cl_padding):
super(simple_backbone, self).__init__()
self.pre_caps = nn.Sequential(
nn.Conv2d(in_channels=cl_input_channels,
out_channels=cl_num_filters,
kernel_size=cl_filter_size,
stride=cl_stride,
padding=cl_padding),
nn.ReLU(),
)
def forward(self, x):
out = self.pre_caps(x) # x is an image
return out
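def _simple_backbone_shape_sketch():
    # Hedged usage sketch, not part of the original file: the hyperparameters
    # below are made-up CIFAR-like values, used only to illustrate the output
    # shape of the simple backbone above.
    backbone = simple_backbone(cl_input_channels=3, cl_num_filters=128,
                               cl_filter_size=3, cl_stride=2, cl_padding=1)
    features = backbone(torch.randn(2, 3, 32, 32))
    return features.shape   # torch.Size([2, 128, 16, 16])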
#### ResNet Backbone ####
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class resnet_backbone_cifar(nn.Module):
def __init__(self, cl_input_channels, cl_num_filters,
cl_stride):
super(resnet_backbone_cifar, self).__init__()
self.in_planes = 64
def _make_layer(block, planes, num_blocks, stride):
strides = [stride] + [1]*(num_blocks-1)
layers = []
for stride in strides:
layers.append(block(self.in_planes, planes, stride))
self.in_planes = planes * block.expansion
return nn.Sequential(*layers)
self.pre_caps = nn.Sequential(
nn.Conv2d(in_channels=cl_input_channels,
out_channels=64,
kernel_size=3,
stride=1,
padding=1,
bias=False),
nn.BatchNorm2d(64),
nn.ReLU(),
_make_layer(block=BasicBlock, planes=64, num_blocks=3, stride=1), # num_blocks=2 or 3
_make_layer(block=BasicBlock, planes=cl_num_filters, num_blocks=4, stride=cl_stride), # num_blocks=2 or 4
)
def forward(self, x):
out = self.pre_caps(x) # x is an image
return out
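def _resnet_backbone_cifar_shape_sketch():
    # Hedged sketch, not part of the original file: made-up CIFAR-style sizes to
    # illustrate the backbone above. The first stage keeps 32x32 at 64 channels
    # and the second stage downsamples once, so with cl_num_filters=128 and
    # cl_stride=2 a (2, 3, 32, 32) batch comes out as (2, 128, 16, 16).
    backbone = resnet_backbone_cifar(cl_input_channels=3, cl_num_filters=128,
                                     cl_stride=2)
    return backbone(torch.randn(2, 3, 32, 32)).shape   # torch.Size([2, 128, 16, 16])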
#Imagenet backbone
class resnet_backbone_imagenet(nn.Module):
def __init__(self, cl_input_channels, cl_num_filters,
cl_stride):
super(resnet_backbone_imagenet, self).__init__()
self.in_planes = 64
def _make_layer(block, planes, num_blocks, stride):
# strides = [stride] + [1]*(num_blocks-1)
strides = [stride]*3 + [1]*(num_blocks-1)
layers = []
for stride in strides:
layers.append(block(self.in_planes, planes, stride))
self.in_planes = planes * block.expansion
return nn.Sequential(*layers)
self.pre_caps = nn.Sequential(
nn.Conv2d(in_channels=cl_input_channels,
out_channels=64,
kernel_size=3,
stride=1,
padding=1,
bias=False),
nn.BatchNorm2d(64),
nn.ReLU(),
_make_layer(block=BasicBlock, planes=64, num_blocks=3, stride=1), # num_blocks=2 or 3
# _make_layer(block=BasicBlock, planes=128, num_blocks=4, stride=cl_stride), # num_blocks=2 or 4
_make_layer(block=BasicBlock, planes=cl_num_filters, num_blocks=4, stride=cl_stride), # num_blocks=2 or 4
# _make_layer(block=BasicBlock, planes=512, num_blocks=2, stride=cl_stride), # num_blocks=2 or 4
)
def forward(self, x):
out = self.pre_caps(x) # x is an image
# print("Resnet backbone shape: ", out.shape)
return out
###
# Explained einsum
'''
https://stackoverflow.com/questions/26089893/understanding-numpys-einsum
torch.einsum('i,ij->i', A, B)
1. A has one axis; we've labelled it i. And B has two axes;
we've labelled axis 0 as i and axis 1 as j.
2. By repeating the label i in both input arrays, we are telling
einsum that these two axes should be multiplied together.
In other words, we're multiplying array A with each column of array B,
just like A[:, np.newaxis] * B does.
3. Notice that j does not appear as a label in our desired output;
we've just used i (we want to end up with a 1D array).
By omitting the label, we're telling einsum to sum along this axis.
In other words, we're summing the rows of the products, just like .sum(axis=1) does.
'''
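def _einsum_explained_sketch():
    # Hedged sketch, not part of the original file: a concrete check of the
    # einsum described in the comment above, using made-up tensors.
    A = torch.tensor([1., 2.])                    # shape (i,) = (2,)
    B = torch.tensor([[10., 20., 30.],
                      [40., 50., 60.]])           # shape (i, j) = (2, 3)
    out = torch.einsum('i,ij->i', A, B)           # multiply along i, sum over j
    ref = (A[:, None] * B).sum(dim=1)             # same result without einsum
    assert torch.allclose(out, ref)               # both are tensor([ 60., 300.])
    return out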
#### Capsule Layer ####
class CapsuleFC(nn.Module):
r"""Applies as a capsule fully-connected layer.
TBD
"""
'''
Same as CapsuleCONV,
except that the kernel size is 1 everywhere.
'''
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules, matrix_pose, dp):
super(CapsuleFC, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.matrix_pose = matrix_pose
# Matrix pose form (Hinton-style matrix capsules)
if matrix_pose:
self.sqrt_d = int(np.sqrt(self.in_d_capsules))
self.weight_init_const = np.sqrt(out_n_capsules/(self.sqrt_d*in_n_capsules))
self.w = nn.Parameter(self.weight_init_const* \
torch.randn(in_n_capsules, self.sqrt_d, self.sqrt_d, out_n_capsules))
# Vector pose form (Hinton-style vector capsules)
else:
self.weight_init_const = np.sqrt(out_n_capsules/(in_d_capsules*in_n_capsules))
self.w = nn.Parameter(self.weight_init_const* \
torch.randn(in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules))
self.dropout_rate = dp
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.drop = nn.Dropout(self.dropout_rate)
self.scale = 1. / (out_d_capsules ** 0.5)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, matrix_pose={}, \
weight_init_const={}, dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules, self.matrix_pose,
self.weight_init_const, self.dropout_rate
)
def forward(self, input, num_iter, next_capsule_value=None):
# b: batch size
# n: num of capsules in current layer
# a: dim of capsules in current layer
# m: num of capsules in next layer
# d: dim of capsules in next layer
if len(input.shape) == 5:
input = input.permute(0, 4, 1, 2, 3)
input = input.contiguous().view(input.shape[0], input.shape[1], -1)
input = input.permute(0,2,1)
if self.matrix_pose:
w = self.w # nxdm
_input = input.view(input.shape[0], input.shape[1], self.sqrt_d, self.sqrt_d) # bnax
else:
w = self.w
if next_capsule_value is None:
# next_capsule_value is None on the first routing iteration
# query key == r_{i,j} (routing probabilities)
query_key = torch.zeros(self.in_n_capsules, self.out_n_capsules).type_as(input)
query_key = F.softmax(query_key, dim=1)
if self.matrix_pose:
# Einsum: computing multilinear expressions (i.e. sums of products) using the Einstein summation convention.
next_capsule_value = torch.einsum('nm, bnax, nxdm->bmad', query_key, _input, w)
else:
next_capsule_value = torch.einsum('nm, bna, namd->bmd', query_key, input, w)
else:
if self.matrix_pose:
next_capsule_value = next_capsule_value.view(next_capsule_value.shape[0],
next_capsule_value.shape[1], self.sqrt_d, self.sqrt_d)
# _query_key == agreement vector ( a_{i,j})
_query_key = torch.einsum('bnax, nxdm, bmad->bnm', _input, w, next_capsule_value)
else:
_query_key = torch.einsum('bna, namd, bmd->bnm', input, w, next_capsule_value)
# New routing probabilities
_query_key.mul_(self.scale)
query_key = F.softmax(_query_key, dim=2)
query_key = query_key / (torch.sum(query_key, dim=2, keepdim=True) + 1e-10)
if self.matrix_pose:
# Use new routing values, to update state of parent capsule
next_capsule_value = torch.einsum('bnm, bnax, nxdm->bmad', query_key, _input,
w)
else:
next_capsule_value = torch.einsum('bnm, bna, namd->bmd', query_key, input,
w)
# Apply dropout
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
if self.matrix_pose:
next_capsule_value = next_capsule_value.view(next_capsule_value.shape[0],
next_capsule_value.shape[1], self.out_d_capsules)
# Apply layer Norm
next_capsule_value = self.nonlinear_act(next_capsule_value)
else:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
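def _capsule_fc_routing_shape_sketch():
    # Hedged sketch, not part of the original file: made-up sizes illustrating
    # the first-iteration FC routing einsum used in CapsuleFC above. With 8
    # input capsules of dim 16 (sqrt_d = 4) and 10 output capsules,
    # 'nm, bnax, nxdm->bmad' maps child poses (b, 8, 4, 4) to parent poses
    # (b, 10, 4, 4).
    b, n, m, sqrt_d = 2, 8, 10, 4
    query_key = torch.softmax(torch.zeros(n, m), dim=1)   # uniform routing probabilities
    poses = torch.randn(b, n, sqrt_d, sqrt_d)
    w = torch.randn(n, sqrt_d, sqrt_d, m)
    parents = torch.einsum('nm, bnax, nxdm->bmad', query_key, poses, w)
    return parents.shape   # torch.Size([2, 10, 4, 4])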
#
class CapsuleCONV(nn.Module):
r"""Applies as a capsule convolutional layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
kernel_size, stride, matrix_pose, dp, coordinate_add=False):
super(CapsuleCONV, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.kernel_size = kernel_size
self.stride = stride
self.matrix_pose = matrix_pose
self.coordinate_add = coordinate_add
if matrix_pose:
self.sqrt_d = int(np.sqrt(self.in_d_capsules))
self.weight_init_const = np.sqrt(out_n_capsules/(self.sqrt_d*in_n_capsules*kernel_size*kernel_size))
self.w = nn.Parameter(self.weight_init_const*torch.randn(kernel_size, kernel_size,
in_n_capsules, self.sqrt_d, self.sqrt_d, out_n_capsules))
else:
self.weight_init_const = np.sqrt(out_n_capsules/(in_d_capsules*in_n_capsules*kernel_size*kernel_size))
self.w = nn.Parameter(self.weight_init_const*torch.randn(kernel_size, kernel_size,
in_n_capsules, in_d_capsules, out_n_capsules,
out_d_capsules))
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.dropout_rate = dp
self.drop = nn.Dropout(self.dropout_rate)
self.scale = 1. / (out_d_capsules ** 0.5)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, \
kernel_size={}, stride={}, coordinate_add={}, matrix_pose={}, weight_init_const={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules,
self.kernel_size, self.stride, self.coordinate_add, self.matrix_pose, self.weight_init_const,
self.dropout_rate
)
def input_expansion(self, input):
# input has size [batch x num_of_capsule x height x width x capsule_dimension]
# unfold(dimension, size, step) → Tensor: extracts sliding local blocks along the given dim
# extracts kernel patches over complete height and width
unfolded_input = input.unfold(2,size=self.kernel_size,step=self.stride).unfold(3,size=self.kernel_size,step=self.stride)
unfolded_input = unfolded_input.permute([0,1,5,6,2,3,4])
# output has size [batch x num_of_capsule x kernel_size x kernel_size x h_out x w_out x capsule_dimension]
return unfolded_input
def forward(self, input, num_iter, next_capsule_value=None):
# k,l: kernel size
# h,w: output width and length
# b: batch size
# n: num of capsules in current layer
# a: dim of capsules in current layer
# m: num of capsules in next layer
# d: dim of capsules in next layer
# This converts (b,32,14,14,16) --> (b,32,3,3,7,7,16) (3X3 patches, 7 in number along both height and width)
inputs = self.input_expansion(input)
# print("Expansion: ",input.shape, inputs.shape)
if self.matrix_pose:
# W is pose of capsules of layer L
# Input is capsule of layer L (p_{L})
w = self.w # klnxdm
# Converts (b,32,3,3,7,7,16) --> (b,32,3,3,7,7,4,4)
_inputs = inputs.view(inputs.shape[0], inputs.shape[1], inputs.shape[2], inputs.shape[3],\
inputs.shape[4], inputs.shape[5], self.sqrt_d, self.sqrt_d) # bnklmhax
# print(_inputs.shape)
else:
w = self.w
if next_capsule_value is None:
# Routing probabilities in 1st iteration
query_key = torch.zeros(self.in_n_capsules, self.kernel_size, self.kernel_size,
self.out_n_capsules).type_as(inputs) # nklm
query_key = F.softmax(query_key, dim=3) # softmax on output number of capsules
# print("Query :",query_key.shape)
# print("w :",w.shape)
# print("input :",_inputs.shape)
if self.matrix_pose:
# a,x are sqrt_d if matrix pose and not vector pose
# This performs both the convolution and the attention step
# next capsule shape is (b,32,7,7,16) just like original input
'''
for all b:
for all m:
for all h:
for all w:
for all a:
for all d:
for all n: (summing over capsules of layer L)
for all k:
for all l: (over all patches)
multiply input pose [(a,x)==(4,4)] with w [(x,d)==(4,4)]
'''
next_capsule_value = torch.einsum('nklm, bnklhwax, klnxdm->bmhwad', query_key,
_inputs, w)
else:
# Vectorised implementation
next_capsule_value = torch.einsum('nklm, bnklhwa, klnamd->bmhwd', query_key,
inputs, w)
else:
if self.matrix_pose:
# break 16 to (4,4) pose
next_capsule_value = next_capsule_value.view(next_capsule_value.shape[0],\
next_capsule_value.shape[1], next_capsule_value.shape[2],\
next_capsule_value.shape[3], self.sqrt_d, self.sqrt_d)
# w=(3,3,32,4,4,m), _input=(b,32,3,3,7,7,4,4) , next_capsule_value= (b,m,7,7,4,4)
_query_key = torch.einsum('bnklhwax, klnxdm, bmhwad->bnklmhw', _inputs, w,
next_capsule_value)
else:
_query_key = torch.einsum('bnklhwa, klnamd, bmhwd->bnklmhw', inputs, w,
next_capsule_value)
# Compute new routing probabilities
_query_key.mul_(self.scale)
query_key = F.softmax(_query_key, dim=4)
query_key = query_key / (torch.sum(query_key, dim=4, keepdim=True) + 1e-10)
if self.matrix_pose:
# Update the parent capsules using the new routing probabilities
next_capsule_value = torch.einsum('bnklmhw, bnklhwax, klnxdm->bmhwad', query_key,
_inputs, w)
# print("others iter : ", next_capsule_value.shape)
else:
next_capsule_value = torch.einsum('bnklmhw, bnklhwa, klnamd->bmhwd', query_key,
inputs, w)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
if self.matrix_pose:
# Correct size of parent capsule
next_capsule_value = next_capsule_value.view(next_capsule_value.shape[0],\
next_capsule_value.shape[1], next_capsule_value.shape[2],\
next_capsule_value.shape[3], self.out_d_capsules)
# Layer Norm
next_capsule_value = self.nonlinear_act(next_capsule_value)
else:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
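def _capsule_conv_routing_shape_sketch():
    # Hedged sketch, not part of the original file: made-up sizes illustrating
    # the patch extraction and first-iteration conv routing used in CapsuleCONV
    # above. A pose tensor (b, n, h, w, d) = (2, 8, 14, 14, 16) unfolded with
    # kernel 3 and stride 2 gives (2, 8, 3, 3, 6, 6, 16); with sqrt_d = 4 and
    # m = 10 output capsules, 'nklm, bnklhwax, klnxdm->bmhwad' yields parent
    # poses of shape (2, 10, 6, 6, 4, 4).
    b, n, m, sqrt_d, k = 2, 8, 10, 4, 3
    x = torch.randn(b, n, 14, 14, 16)
    patches = x.unfold(2, size=k, step=2).unfold(3, size=k, step=2)
    patches = patches.permute([0, 1, 5, 6, 2, 3, 4])       # (2, 8, 3, 3, 6, 6, 16)
    _patches = patches.reshape(b, n, k, k, 6, 6, sqrt_d, sqrt_d)
    query_key = torch.softmax(torch.zeros(n, k, k, m), dim=3)
    w = torch.randn(k, k, n, sqrt_d, sqrt_d, m)
    parents = torch.einsum('nklm, bnklhwax, klnxdm->bmhwad', query_key, _patches, w)
    return parents.shape   # torch.Size([2, 10, 6, 6, 4, 4])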
#### Capsule Layers with the proposed bilinear sparse routing ####
class SACapsuleFC(nn.Module):
r"""Applies as a capsule fully-connected layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules, matrix_pose, dp):
super(SACapsuleFC, self).__init__()
self.in_n_capsules = in_n_capsules # This is n_caps * h_in * w_in
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.matrix_pose = matrix_pose
self.dropout_rate = dp
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.drop = nn.Dropout(self.dropout_rate)
self.scale = 1. / (out_d_capsules ** 0.5)
self.sinhkorn_caps_attn = BilinearSparseRouting(next_bucket_size=self.out_n_capsules, in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='FC', kernel_size=1,
temperature = 0.75,
non_permutative = True, sinkhorn_iter = 7, n_sortcut = 2, dropout = 0., current_bucket_size = self.in_n_capsules//8,
use_simple_sort_net = False)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules, self.matrix_pose,
self.dropout_rate
)
def forward(self, input, num_iter, next_capsule_value=None):
# b: batch size
# n: num of capsules in current layer
# a: dim of capsules in current layer
# m: num of capsules in next layer
# d: dim of capsules in next layer
print("Input ", input.shape)
if len(input.shape) == 5:
input = input.permute(0, 4, 1, 2, 3)
input = input.contiguous().view(input.shape[0], input.shape[1], -1)
input = input.permute(0,2,1)
print("Transformed ", input.shape)
batch_size = input.shape[0]
next_capsule_value = self.sinhkorn_caps_attn(current_pose=input, h_out=1, w_out=1, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
class SACapsuleCONV(nn.Module):
r"""Applies as a capsule convolutional layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
kernel_size, stride, matrix_pose, dp, padding=None, coordinate_add=False):
super(SACapsuleCONV, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.kernel_size = kernel_size
self.stride = stride
self.matrix_pose = matrix_pose
self.coordinate_add = coordinate_add
self.padding = padding
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.dropout_rate = dp
self.drop = nn.Dropout(self.dropout_rate)
self.sinhkorn_caps_attn = BilinearSparseRouting(next_bucket_size=self.out_n_capsules, in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='conv', kernel_size=kernel_size,
temperature = 0.75,
non_permutative = True, sinkhorn_iter = 7, n_sortcut = 1, dropout = 0., current_bucket_size = self.in_n_capsules,
use_simple_sort_net = False)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, \
kernel_size={}, stride={}, coordinate_add={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules,
self.kernel_size, self.stride, self.coordinate_add, self.matrix_pose,
self.dropout_rate
)
def input_expansion(self, input):
# input has size [batch x num_of_capsule x height x width x capsule_dimension]
if self.padding:
input = input.permute([0,1,4,2,3]) #For padding h,w
if not self.padding%1: # integer padding: pad all four sides equally
input = F.pad(input, [self.padding, self.padding, self.padding, self.padding]) #TODO: Padding to maintain same size, change so that caps dim not padded
else:
input = F.pad(input, [math.ceil(self.padding), math.floor(self.padding), math.ceil(self.padding), math.floor(self.padding)]) #TODO: Padding to maintain same size, change so that caps dim not padded
input = input.permute([0,1,3,4,2])
unfolded_input = input.unfold(2,size=self.kernel_size,step=self.stride).unfold(3,size=self.kernel_size,step=self.stride)
unfolded_input = unfolded_input.permute([0,1,5,6,2,3,4])
# output has size [batch x num_of_capsule x kernel_size x kernel_size x h_out x w_out x capsule_dimension]
return unfolded_input
def forward(self, input, num_iter, next_capsule_value=None):
# k,l: kernel size
# h,w: output width and length
inputs = self.input_expansion(input)
batch_size = inputs.shape[0]
h_out = inputs.shape[4]
w_out = inputs.shape[5]
next_capsule_value = self.sinhkorn_caps_attn(current_pose=inputs, h_out=h_out, w_out=w_out, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
#### Capsule Layers with the proposed bilinear routing without sinkhorn - Ablation study ####
class BACapsuleFC(nn.Module):
r"""Applies as a capsule fully-connected layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules, matrix_pose, dp):
super(BACapsuleFC, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.matrix_pose = matrix_pose
self.dropout_rate = dp
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.drop = nn.Dropout(self.dropout_rate)
self.scale = 1. / (out_d_capsules ** 0.5)
self.bilinear_attn = BilinearRouting(next_bucket_size=self.out_n_capsules, in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='FC', kernel_size=1,
temperature = 0.75,
non_permutative = True, sinkhorn_iter = 7, n_sortcut = 2, dropout = 0., current_bucket_size = self.in_n_capsules//8,
use_simple_sort_net = False)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules, self.matrix_pose,
self.dropout_rate
)
def forward(self, input, num_iter, next_capsule_value=None):
# b: batch size
# n: num of capsules in current layer
# a: dim of capsules in current layer
# m: num of capsules in next layer
# d: dim of capsules in next layer
if len(input.shape) == 5:
input = input.permute(0, 4, 1, 2, 3)
input = input.contiguous().view(input.shape[0], input.shape[1], -1)
input = input.permute(0,2,1)
batch_size = input.shape[0]
next_capsule_value = self.bilinear_attn(current_pose=input, h_out=1, w_out=1, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
class BACapsuleCONV(nn.Module):
r"""Applies as a capsule convolutional layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
kernel_size, stride, matrix_pose, dp, padding=None, coordinate_add=False):
super(BACapsuleCONV, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.kernel_size = kernel_size
self.stride = stride
self.matrix_pose = matrix_pose
self.coordinate_add = coordinate_add
self.padding = padding
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.dropout_rate = dp
self.drop = nn.Dropout(self.dropout_rate)
self.bilinear_attn = BilinearRouting(next_bucket_size=self.out_n_capsules, in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='conv', kernel_size=kernel_size,
temperature = 0.75,
non_permutative = True, sinkhorn_iter = 7, n_sortcut = 1, dropout = 0., current_bucket_size = self.in_n_capsules,
use_simple_sort_net = False)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, \
kernel_size={}, stride={}, coordinate_add={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules,
self.kernel_size, self.stride, self.coordinate_add, self.matrix_pose,
self.dropout_rate
)
def input_expansion(self, input):
# input has size [batch x num_of_capsule x height x width x capsule_dimension]
if self.padding:
input = input.permute([0,1,4,2,3]) #For padding h,w
if not self.padding%1:
input = F.pad(input, [self.padding, self.padding, self.padding, self.padding]) #TODO: Padding to maintain same size, change so that caps dim not padded
else:
input = F.pad(input, [math.ceil(self.padding), math.floor(self.padding), math.ceil(self.padding), math.floor(self.padding)]) #TODO: Padding to maintain same size, change so that caps dim not padded
input = input.permute([0,1,3,4,2])
unfolded_input = input.unfold(2,size=self.kernel_size,step=self.stride).unfold(3,size=self.kernel_size,step=self.stride)
unfolded_input = unfolded_input.permute([0,1,5,6,2,3,4])
# output has size [batch x num_of_capsule x kernel_size x kernel_size x h_out x w_out x capsule_dimension]
return unfolded_input
def forward(self, input, num_iter, next_capsule_value=None):
# k,l: kernel size
# h,w: output width and length
inputs = self.input_expansion(input)
batch_size = inputs.shape[0]
h_out = inputs.shape[4]
w_out = inputs.shape[5]
next_capsule_value = self.bilinear_attn(current_pose=inputs, h_out=h_out, w_out=w_out, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
#### Capsule Layers with the Linformer projections and unfold operations ####
class LACapsuleFC(nn.Module):
r"""Applies as a capsule fully-connected layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules, input_img_size, output_img_size, hidden_dim, matrix_pose, dp):
super(LACapsuleFC, self).__init__()
self.in_n_capsules = in_n_capsules # This is n_caps * h_in * w_in
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.input_img_size = input_img_size
self.output_img_size = output_img_size
self.matrix_pose = matrix_pose
self.dropout_rate = dp
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.drop = nn.Dropout(self.dropout_rate)
self.scale = 1. / (out_d_capsules ** 0.5)
self.linformer_attention = LinformerProjectionKernel(in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='FC', input_img_size = input_img_size, output_img_size = output_img_size,
hidden_dim = hidden_dim, kernel_size=1, dropout = 0.)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules, self.matrix_pose,
self.dropout_rate
)
def forward(self, input, num_iter, next_capsule_value=None):
# b: batch size
# n: num of capsules in current layer
# a: dim of capsules in current layer
# m: num of capsules in next layer
# d: dim of capsules in next layer
# print("Input ", input.shape)
if len(input.shape) == 5:
input = input.permute(0, 4, 1, 2, 3)
input = input.contiguous().view(input.shape[0], input.shape[1], -1)
input = input.permute(0,2,1)
# print("Transformed ", input.shape)
batch_size = input.shape[0]
next_capsule_value = self.linformer_attention(current_pose=input, h_out=1, w_out=1, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
class LACapsuleCONV(nn.Module):
r"""Applies as a capsule convolutional layer.
TBD
"""
def __init__(self, in_n_capsules, in_d_capsules, out_n_capsules, out_d_capsules,
kernel_size, stride, input_img_size, output_img_size, hidden_dim,matrix_pose, dp, padding=None, coordinate_add=False):
super(LACapsuleCONV, self).__init__()
self.in_n_capsules = in_n_capsules
self.in_d_capsules = in_d_capsules
self.out_n_capsules = out_n_capsules
self.out_d_capsules = out_d_capsules
self.input_img_size = input_img_size
self.output_img_size = output_img_size
self.kernel_size = kernel_size
self.stride = stride
self.matrix_pose = matrix_pose
self.coordinate_add = coordinate_add
self.padding = padding
self.nonlinear_act = nn.LayerNorm(out_d_capsules)
self.dropout_rate = dp
self.drop = nn.Dropout(self.dropout_rate)
self.linformer_attention = LinformerProjectionKernel(in_n_capsules=in_n_capsules, in_d_capsules=in_d_capsules, out_n_capsules=out_n_capsules,
out_d_capsules=out_d_capsules, matrix_pose=self.matrix_pose, layer_type='conv', input_img_size = input_img_size, output_img_size = output_img_size,
hidden_dim=hidden_dim, kernel_size=kernel_size, dropout = 0.)
def extra_repr(self):
return 'in_n_capsules={}, in_d_capsules={}, out_n_capsules={}, out_d_capsules={}, \
kernel_size={}, stride={}, coordinate_add={}, matrix_pose={}, \
dropout_rate={}'.format(
self.in_n_capsules, self.in_d_capsules, self.out_n_capsules, self.out_d_capsules,
self.kernel_size, self.stride, self.coordinate_add, self.matrix_pose,
self.dropout_rate
)
def input_expansion(self, input):
# input has size [batch x num_of_capsule x height x width x capsule_dimension]
if self.padding:
input = input.permute([0,1,4,2,3]) #For padding h,w
if not self.padding%1:
input = F.pad(input, [self.padding, self.padding, self.padding, self.padding]) #TODO: Padding to maintain same size, change so that caps dim not padded
else:
input = F.pad(input, [math.ceil(self.padding), math.floor(self.padding), math.ceil(self.padding), math.floor(self.padding)]) #TODO: Padding to maintain same size, change so that caps dim not padded
input = input.permute([0,1,3,4,2])
unfolded_input = input.unfold(2,size=self.kernel_size,step=self.stride).unfold(3,size=self.kernel_size,step=self.stride)
unfolded_input = unfolded_input.permute([0,1,5,6,2,3,4])
# output has size [batch x num_of_capsule x kernel_size x kernel_size x h_out x w_out x capsule_dimension]
return unfolded_input
def forward(self, input, num_iter, next_capsule_value=None):
# k,l: kernel size
# h,w: output width and length
inputs = self.input_expansion(input)
batch_size = inputs.shape[0]
h_out = inputs.shape[4]
w_out = inputs.shape[5]
next_capsule_value = self.linformer_attention(current_pose=inputs, h_out=h_out, w_out=w_out, next_pose=next_capsule_value)
next_capsule_value = self.drop(next_capsule_value)
if not next_capsule_value.shape[-1] == 1:
next_capsule_value = self.nonlinear_act(next_capsule_value)
return next_capsule_value
| 48.879641
| 214
| 0.591146
| 4,923
| 38,175
| 4.287629
| 0.071704
| 0.056282
| 0.081865
| 0.036005
| 0.847072
| 0.831959
| 0.808509
| 0.795338
| 0.787284
| 0.770371
| 0
| 0.015409
| 0.318324
| 38,175
| 780
| 215
| 48.942308
| 0.795681
| 0.129928
| 0
| 0.746094
| 0
| 0
| 0.011071
| 0
| 0
| 0
| 0
| 0.003846
| 0
| 1
| 0.074219
| false
| 0
| 0.013672
| 0.015625
| 0.164063
| 0.003906
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9e628c7670538883c0224471419833f2f3273e0
| 22,331
|
py
|
Python
|
datadog_checks_base/tests/openmetrics/test_transformers/test_histogram.py
|
vbarbaresi/integrations-core
|
ab26ab1cd6c28a97c1ad1177093a93659658c7aa
|
[
"BSD-3-Clause"
] | null | null | null |
datadog_checks_base/tests/openmetrics/test_transformers/test_histogram.py
|
vbarbaresi/integrations-core
|
ab26ab1cd6c28a97c1ad1177093a93659658c7aa
|
[
"BSD-3-Clause"
] | null | null | null |
datadog_checks_base/tests/openmetrics/test_transformers/test_histogram.py
|
vbarbaresi/integrations-core
|
ab26ab1cd6c28a97c1ad1177093a93659658c7aa
|
[
"BSD-3-Clause"
] | null | null | null |
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
from datadog_checks.dev.testing import requires_py3
from ..utils import get_check
pytestmark = [
requires_py3,
pytest.mark.openmetrics,
pytest.mark.openmetrics_transformers,
pytest.mark.openmetrics_transformers_histogram,
]
def assert_metric_counts(aggregator, payload):
num_bucket_metrics = 0
num_sum_metrics = 0
num_count_metrics = 0
lines = [line.strip() for line in payload.strip().splitlines()]
metric_name = lines[0].split()[2]
lines = lines[2:]
for line in lines:
if line.startswith('{}_sum'.format(metric_name)):
num_sum_metrics += 1
elif line.startswith('{}_count'.format(metric_name)):
num_count_metrics += 1
elif 'Inf"' not in line:
num_bucket_metrics += 1
assert len(aggregator.metrics('test.{}.bucket'.format(metric_name))) == num_bucket_metrics
assert len(aggregator.metrics('test.{}.sum'.format(metric_name))) == num_sum_metrics
assert len(aggregator.metrics('test.{}.count'.format(metric_name))) == num_count_metrics
def test_default(aggregator, dd_run_check, mock_http_response):
payload = """
# HELP etcd_disk_wal_fsync_duration_seconds The latency distributions of fsync called by wal.
# TYPE etcd_disk_wal_fsync_duration_seconds histogram
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.001"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.002"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.004"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.008"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.016"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.032"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.064"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.128"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.256"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.512"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="1.024"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="2.048"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="4.096"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="8.192"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="+Inf"} 4
etcd_disk_wal_fsync_duration_seconds_sum{kind="fs",app="vault"} 0.026131671
etcd_disk_wal_fsync_duration_seconds_count{kind="fs",app="vault"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.001"} 718
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.002"} 740
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.004"} 743
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.008"} 748
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.016"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.032"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.064"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.128"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.256"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.512"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="1.024"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="2.048"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="4.096"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="8.192"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="+Inf"} 751
etcd_disk_wal_fsync_duration_seconds_sum{kind="fs",app="kubernetes"} 0.3097010759999998
etcd_disk_wal_fsync_duration_seconds_count{kind="fs",app="kubernetes"} 751
"""
mock_http_response(payload)
check = get_check({'metrics': ['.+']})
dd_run_check(check)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.bucket',
2,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:vault', 'upper_bound:0.001'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.sum',
0.026131671,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:vault'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.count',
4,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:vault'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.bucket',
718,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:kubernetes', 'upper_bound:0.001'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.sum',
0.3097010759999998,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:kubernetes'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.count',
751,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:kubernetes'],
)
aggregator.assert_all_metrics_covered()
assert_metric_counts(aggregator, payload)
def test_disable_histogram_buckets(aggregator, dd_run_check, mock_http_response):
payload = """
# HELP etcd_disk_wal_fsync_duration_seconds The latency distributions of fsync called by wal.
# TYPE etcd_disk_wal_fsync_duration_seconds histogram
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.001"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.002"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.004"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.008"} 2
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.016"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.032"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.064"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.128"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.256"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="0.512"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="1.024"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="2.048"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="4.096"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="8.192"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="vault",le="+Inf"} 4
etcd_disk_wal_fsync_duration_seconds_sum{kind="fs",app="vault"} 0.026131671
etcd_disk_wal_fsync_duration_seconds_count{kind="fs",app="vault"} 4
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.001"} 718
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.002"} 740
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.004"} 743
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.008"} 748
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.016"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.032"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.064"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.128"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.256"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="0.512"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="1.024"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="2.048"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="4.096"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="8.192"} 751
etcd_disk_wal_fsync_duration_seconds_bucket{kind="fs",app="kubernetes",le="+Inf"} 751
etcd_disk_wal_fsync_duration_seconds_sum{kind="fs",app="kubernetes"} 0.3097010759999998
etcd_disk_wal_fsync_duration_seconds_count{kind="fs",app="kubernetes"} 751
"""
mock_http_response(payload)
check = get_check({'metrics': ['.+'], 'collect_histogram_buckets': False})
dd_run_check(check)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.sum',
0.026131671,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:vault'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.count',
4,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:vault'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.sum',
0.3097010759999998,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:kubernetes'],
)
aggregator.assert_metric(
'test.etcd_disk_wal_fsync_duration_seconds.count',
751,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'kind:fs', 'app:kubernetes'],
)
aggregator.assert_all_metrics_covered()
assert len(aggregator.metrics('test.etcd_disk_wal_fsync_duration_seconds.sum')) == 2
assert len(aggregator.metrics('test.etcd_disk_wal_fsync_duration_seconds.count')) == 2
def test_non_cumulative_histogram_buckets(aggregator, dd_run_check, mock_http_response):
payload = """
# HELP rest_client_request_latency_seconds Request latency in seconds. Broken down by verb and URL.
# TYPE rest_client_request_latency_seconds histogram
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.004"} 702
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.001"} 254
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.002"} 621
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.008"} 727
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.016"} 738
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.032"} 744
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.064"} 748
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.128"} 754
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.256"} 755
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.512"} 755
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="+Inf"} 755
rest_client_request_latency_seconds_sum{url="http://127.0.0.1:8080/api",verb="GET"} 2.185820220000001
rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755
"""
mock_http_response(payload)
check = get_check({'metrics': ['.+'], 'non_cumulative_histogram_buckets': True})
dd_run_check(check)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
81,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.004', 'lower_bound:0.002'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
254,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.001', 'lower_bound:0'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
367,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.002', 'lower_bound:0.001'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
25,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.008', 'lower_bound:0.004'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
11,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.016', 'lower_bound:0.008'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
6,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.032', 'lower_bound:0.016'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
4,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.064', 'lower_bound:0.032'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
6,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.128', 'lower_bound:0.064'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
1,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.256', 'lower_bound:0.128'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.bucket',
0,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.512', 'lower_bound:0.256'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.sum',
2.185820220000001,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.count',
755,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET'],
)
aggregator.assert_all_metrics_covered()
assert_metric_counts(aggregator, payload)
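def _non_cumulative_bucket_sketch():
    # Hedged sketch, not part of the original tests: how the non-cumulative
    # bucket values asserted above follow from the cumulative payload. Sorting
    # the cumulative counts by upper bound and taking successive differences
    # gives the per-bucket counts, e.g. 702 - 621 = 81 for the 0.002-0.004 bucket.
    cumulative = {0.001: 254, 0.002: 621, 0.004: 702, 0.008: 727}
    bounds = sorted(cumulative)
    deltas = {bounds[0]: cumulative[bounds[0]]}
    for lower, upper in zip(bounds, bounds[1:]):
        deltas[upper] = cumulative[upper] - cumulative[lower]
    return deltas   # {0.001: 254, 0.002: 367, 0.004: 81, 0.008: 25}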
def test_non_cumulative_histogram_buckets_single_bucket(aggregator, dd_run_check, mock_http_response):
payload = """
# HELP rest_client_request_latency_seconds Request latency in seconds. Broken down by verb and URL.
# TYPE rest_client_request_latency_seconds histogram
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="+Inf"} 755
rest_client_request_latency_seconds_sum{url="http://127.0.0.1:8080/api",verb="GET"} 2.185820220000001
rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755
"""
mock_http_response(payload)
check = get_check({'metrics': ['.+'], 'non_cumulative_histogram_buckets': True})
dd_run_check(check)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.sum',
2.185820220000001,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET'],
)
aggregator.assert_metric(
'test.rest_client_request_latency_seconds.count',
755,
metric_type=aggregator.MONOTONIC_COUNT,
tags=['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET'],
)
aggregator.assert_all_metrics_covered()
def test_histogram_buckets_as_distributions(aggregator, dd_run_check, mock_http_response):
payload = """
# HELP rest_client_request_latency_seconds Request latency in seconds. Broken down by verb and URL.
# TYPE rest_client_request_latency_seconds histogram
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.004"} 702
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.001"} 254
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.002"} 621
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.008"} 727
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.016"} 738
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.032"} 744
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.064"} 748
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.128"} 754
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.256"} 755
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="0.512"} 755
rest_client_request_latency_seconds_bucket{url="http://127.0.0.1:8080/api",verb="GET",le="+Inf"} 755
rest_client_request_latency_seconds_sum{url="http://127.0.0.1:8080/api",verb="GET"} 2.185820220000001
rest_client_request_latency_seconds_count{url="http://127.0.0.1:8080/api",verb="GET"} 755
"""
mock_http_response(payload)
check = get_check(
{
'metrics': ['.+'],
'histogram_buckets_as_distributions': True,
# Implicitly activated
'collect_histogram_buckets': False,
}
)
dd_run_check(check)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
81,
0.002,
0.004,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.004', 'lower_bound:0.002'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
254,
0,
0.001,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.001', 'lower_bound:0'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
367,
0.001,
0.002,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.002', 'lower_bound:0.001'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
25,
0.004,
0.008,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.008', 'lower_bound:0.004'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
11,
0.008,
0.016,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.016', 'lower_bound:0.008'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
6,
0.016,
0.032,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.032', 'lower_bound:0.016'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
4,
0.032,
0.064,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.064', 'lower_bound:0.032'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
6,
0.064,
0.128,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.128', 'lower_bound:0.064'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
1,
0.128,
0.256,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.256', 'lower_bound:0.128'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
0,
0.256,
0.512,
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:0.512', 'lower_bound:0.256'],
)
aggregator.assert_histogram_bucket(
'test.rest_client_request_latency_seconds',
0,
0.512,
float('Inf'),
True,
check.hostname,
['endpoint:test', 'url:http://127.0.0.1:8080/api', 'verb:GET', 'upper_bound:inf', 'lower_bound:0.512'],
)
aggregator.assert_all_metrics_covered()
| 48.971491
| 118
| 0.685907
| 3,196
| 22,331
| 4.47184
| 0.049124
| 0.086412
| 0.064652
| 0.094039
| 0.945144
| 0.938287
| 0.930171
| 0.920655
| 0.920095
| 0.920025
| 0
| 0.083615
| 0.166674
| 22,331
| 455
| 119
| 49.079121
| 0.6844
| 0.005777
| 0
| 0.755344
| 0
| 0.068884
| 0.648556
| 0.335451
| 0
| 0
| 0
| 0
| 0.114014
| 1
| 0.014252
| false
| 0
| 0.007126
| 0
| 0.021378
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9ecbee7ad731407ca361a14a9a038541cfa19a6
| 12,520
|
py
|
Python
|
lib/ocr_correction.py
|
jarobyte91/post_ocr_correction
|
bae2e601c838a23cc31a82e10ed5cd1b10ccdac6
|
[
"MIT"
] | 3
|
2021-11-15T08:29:39.000Z
|
2021-12-20T21:56:54.000Z
|
lib/ocr_correction.py
|
jarobyte91/post_ocr_correction
|
bae2e601c838a23cc31a82e10ed5cd1b10ccdac6
|
[
"MIT"
] | 3
|
2021-11-15T08:29:36.000Z
|
2022-01-06T13:52:34.000Z
|
lib/ocr_correction.py
|
jarobyte91/post_ocr_correction
|
bae2e601c838a23cc31a82e10ed5cd1b10ccdac6
|
[
"MIT"
] | 1
|
2021-11-08T20:15:52.000Z
|
2021-11-08T20:15:52.000Z
|
from collections import Counter
import re
from math import exp
from metrics import levenshtein
import pandas as pd
from timeit import default_timer as t
from tqdm.notebook import tqdm
######################
# weighting functions
######################
def uniform(j, window_size):
return 1.0
def triangle(j, window_size):
m = window_size//2
return m - 0.5 * abs(m - j)
def bell(j, window_size):
m = window_size // 2
s = window_size // 2
return exp(-((m-j)/s)**2)
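def _weighting_sketch():
    # Hedged sketch, not part of the original file: the three window weightings
    # above evaluated at each in-window position j for a made-up window_size of
    # 4. uniform treats all positions equally, triangle peaks at the window
    # centre, and bell decays smoothly away from it.
    window_size = 4
    return {name: [round(fn(j, window_size), 3) for j in range(window_size)]
            for name, fn in [("uniform", uniform), ("triangle", triangle), ("bell", bell)]}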
######################
# correction functions
######################
def correct_by_disjoint_window(string,
model,
vocabulary,
window_size = 50,
decoding_method = "greedy_search",
document_progress_bar = False,
document_batch_progress_bar = 0,
*args):
model.eval()
windows = [string[i:i+window_size] for i in range(0, len(string), window_size)]
windows = ["".join([vocabulary.lookup(c) for c in s]).replace("<UNK>", " ") for s in windows]
X = model.text2tensor(windows)
predictions, probs = model.predict(X,
predictions = window_size,
method = decoding_method,
progress_bar = document_batch_progress_bar,
main_progress_bar = document_progress_bar,
*args)
return re.sub(r"<START>|<END>|<PAD>", "", "".join(model.tensor2text(predictions)))
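# correct_by_disjoint_window above splits the document into non-overlapping chunks and
# simply concatenates the model output for each chunk. correct_by_sliding_window below
# instead runs the model on every window of length window_size (stride 1), lets each
# window cast a weighted vote for every character position it covers, and returns both
# the per-position vote tallies and the majority-vote corrected string.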
def correct_by_sliding_window(string,
model,
vocabulary,
window_size = 50,
weighting = uniform,
decoding_method = "greedy_search",
document_progress_bar = False,
document_batch_progress_bar = 0,
main_batch_size = 1024,
*args):
model.eval()
if len(string) <= window_size:
windows = [string]
else:
windows = [string[i:i + window_size] for i in range(len(string) - window_size + 1)]
windows = ["".join([vocabulary.lookup(c) for c in s]).replace("<UNK>", " ") for s in windows]
X = model.text2tensor(windows)
predictions, probs = model.predict(X,
predictions = window_size,
method = decoding_method,
main_progress_bar = document_progress_bar,
progress_bar = document_batch_progress_bar,
*args)
output = [re.sub(r"<START>|<END>|<PAD>", "", s) for s in model.tensor2text(predictions)]
votes = [{k:0.0 for k in vocabulary} for c in string]
for i, s in enumerate(output):
for j, (counter, char) in enumerate(zip(votes[i:i + window_size], s)):
counter[char] += weighting(j, window_size)
return votes, "".join([max(c.keys(), key = lambda x: c[x]) for c in votes])
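# evaluate_model below sweeps a grid of configurations on the raw/gold-standard pairs:
# disjoint vs. sliding windows, greedy vs. beam-search decoding, and (for the sliding
# window) the three weighting functions. Each run is timed and the character error rate
# before and after correction is recorded; the cumulative results are rewritten to a CSV
# at save_path after every configuration.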
def evaluate_model(raw, gs, model, vocabulary, save_path, window_size = 10,
document_progress_bar = False):
metrics = []
old = levenshtein(reference = gs, hypothesis = raw).cer.mean()
# disjoint
print("disjoint window...")
print("greedy_search...")
start = t()
corrections = [correct_by_disjoint_window(s,
model,
vocabulary,
document_progress_bar = document_progress_bar,
window_size = window_size)
for s in raw]
metrics.append({"window":"disjoint",
"decoding":"greedy",
"window_size":window_size * 2,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
start = t()
corrections = [correct_by_disjoint_window(s,
model,
vocabulary,
document_progress_bar = document_progress_bar,
window_size = window_size * 2)
for s in raw]
metrics.append({"window":"disjoint",
"decoding":"greedy",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
print("beam_search...")
start = t()
corrections = [correct_by_disjoint_window(s,
model,
vocabulary,
decoding_method = "beam_search",
document_progress_bar = document_progress_bar,
window_size = window_size)
for s in raw]
metrics.append({"window":"disjoint",
"decoding":"beam",
"window_size":window_size * 2,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
start = t()
corrections = [correct_by_disjoint_window(s,
model,
vocabulary,
decoding_method = "beam_search",
document_progress_bar = document_progress_bar,
window_size = window_size * 2)
for s in raw]
metrics.append({"window":"disjoint",
"decoding":"beam",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
# sliding
print("sliding, greedy...")
## greedy search
print("uniform...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary,
weighting = uniform,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"greedy",
"weighting":"uniform",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
print("triangle...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary,
weighting = triangle,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"greedy",
"weighting":"triangle",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
print("bell...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary, weighting = bell,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"greedy",
"weighting":"bell",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
## beam search
print("sliding, beam...")
print("uniform...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary,
decoding_method = "beam_search",
weighting = uniform,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"beam",
"weighting":"uniform",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
print("triangle...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary,
decoding_method = "beam_search",
weighting = triangle,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"beam",
"weighting":"triangle",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
print("bell...")
start = t()
corrections = [correct_by_sliding_window(s, model, vocabulary,
decoding_method = "beam_search",
weighting = bell,
document_progress_bar = document_progress_bar,
window_size = window_size)[1]
for s in raw]
metrics.append({"window":"sliding",
"decoding":"beam",
"weighting":"bell",
"window_size":window_size,
"inference_seconds":t() - start,
"cer_before":old,
"cer_after":levenshtein(gs, corrections).cer.mean()})
pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before)).to_csv(save_path, index = False)
return pd.DataFrame(metrics).assign(improvement = lambda df: 100 * (1 - df.cer_after / df.cer_before))
| 49.68254
| 132
| 0.478195
| 1,156
| 12,520
| 4.956747
| 0.102076
| 0.101222
| 0.082897
| 0.069808
| 0.8363
| 0.828447
| 0.798429
| 0.776789
| 0.776789
| 0.776789
| 0
| 0.011163
| 0.413259
| 12,520
| 251
| 133
| 49.880478
| 0.768854
| 0.006629
| 0
| 0.816964
| 0
| 0
| 0.088736
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026786
| false
| 0
| 0.03125
| 0.004464
| 0.084821
| 0.049107
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9f10df4ccd0e8fe899f5d96f5c7af4a1aeb1aeb
| 1,248
|
py
|
Python
|
decorators.py
|
munhanha/mtg-random
|
30211f1c1824db41b0e4b6e237db2fccca9c43a8
|
[
"BSD-3-Clause"
] | null | null | null |
decorators.py
|
munhanha/mtg-random
|
30211f1c1824db41b0e4b6e237db2fccca9c43a8
|
[
"BSD-3-Clause"
] | null | null | null |
decorators.py
|
munhanha/mtg-random
|
30211f1c1824db41b0e4b6e237db2fccca9c43a8
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth import REDIRECT_FIELD_NAME
def is_staff(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is staff, redirecting
to the log-in page if necessary.
Possible usage:
@is_staff
def view....
urlpatterns = patterns('',
(r'^databrowse/(.*)', is_staff(databrowse.site.root)),
)
"""
actual_decorator = user_passes_test(
lambda u: u.is_staff,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
def is_superuser(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is superuser, redirecting
to the log-in page if necessary.
Possible usage:
@is_superuser
def view....
urlpatterns = patterns('',
(r'^databrowse/(.*)', is_superuser(databrowse.site.root)),
)
"""
actual_decorator = user_passes_test(
lambda u: u.is_superuser,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
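# Both decorators wrap django.contrib.auth.decorators.user_passes_test with a different
# predicate (u.is_staff vs. u.is_superuser). They can be applied bare (@is_staff above a
# view) or called directly on a view function, e.g. is_staff(some_view), as in the
# docstring examples.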
| 28.363636
| 90
| 0.741186
| 169
| 1,248
| 5.213018
| 0.272189
| 0.132804
| 0.173666
| 0.113507
| 0.817253
| 0.817253
| 0.817253
| 0.817253
| 0.817253
| 0.817253
| 0
| 0
| 0.163462
| 1,248
| 44
| 91
| 28.363636
| 0.84387
| 0.379006
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.15
| 0.1
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6a2603a2e44e85b57497520e7ff7bbfac46d9f5b
| 7,326
|
py
|
Python
|
customplotting.py
|
JAmarel/FilmThickness
|
5c98580bd36b823448750ae008c838d0b49c5938
|
[
"MIT"
] | null | null | null |
customplotting.py
|
JAmarel/FilmThickness
|
5c98580bd36b823448750ae008c838d0b49c5938
|
[
"MIT"
] | null | null | null |
customplotting.py
|
JAmarel/FilmThickness
|
5c98580bd36b823448750ae008c838d0b49c5938
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
def HSM_Plot(RedIntensity,GreenIntensity,BlueIntensity,RedSTD,GreenSTD,BlueSTD,ExposureTimes):
"""Plots intensity values as recorded from imagej histograms vs ExposureTime.
Parameters
----------
RedIntensity: (N+1,) array.
Numpy array containing N+1 subarrays. Where each sub array corresponds to
a different lasso'd region. Each entry in each sub array is an intensity
value that corresponds to a certain exposure time. N is the number of
lasso'd regions. So it has shape (N+1,) to account for extra HSM data
RedSTD: (N+1,) array
Same as RedIntensity, except now each entry in a sub array is the standard
deviation of each intensity. Calculated from imagej histogram data.
ExposureTimes: (N+1,) array
Same as above. Each entry is now an exposure time.
"""
f, (ax1,ax2,ax3) = plt.subplots(3, 1,figsize=(8,12))
#Removing Top/Right Spine/Ticks
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.yaxis.set_ticks_position('left')
ax1.xaxis.set_ticks_position('bottom')
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.yaxis.set_ticks_position('left')
ax2.xaxis.set_ticks_position('bottom')
ax3.spines['right'].set_visible(False)
ax3.spines['top'].set_visible(False)
ax3.yaxis.set_ticks_position('left')
ax3.xaxis.set_ticks_position('bottom')
plt.sca(ax1)
for i in np.arange(0,len(RedIntensity)):
plt.errorbar(ExposureTimes[i],RedIntensity[i], yerr=RedSTD[i],ls='None',marker='None')
plt.title("Red Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
plt.sca(ax2)
for i in np.arange(0,len(GreenIntensity)):
plt.errorbar(ExposureTimes[i],GreenIntensity[i], yerr=GreenSTD[i],ls='None',marker='None')
plt.title("Green Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
plt.sca(ax3)
for i in np.arange(0,len(BlueIntensity)):
plt.errorbar(ExposureTimes[i],BlueIntensity[i], yerr=BlueSTD[i],ls='None',marker='None')
plt.title("Blue Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
plt.tight_layout()
def Line_Plot(RedIntensity,GreenIntensity,BlueIntensity,RedSTD,GreenSTD,BlueSTD,RedSlopes,GreenSlopes,BlueSlopes,ExposureTimes):
"""Plots intensity values as recorded from imagej histograms vs ExposureTime.
Parameters
----------
RedIntensity: (N+1,) array.
Numpy array containing N+1 subarrays. Where each sub array corresponds to
a different lasso'd region. Each entry in each sub array is an intensity
value that corresponds to a certain exposure time. N is the number of
lasso'd regions. So it has shape (N+1,) to account for extra HSM data
RedSTD: (N+1,) array
Same as RedIntensity, except now each entry in a sub array is the standard
deviation of each intensity. Calculated from imagej histogram data.
ExposureTimes: (N+1,) array
Same as above. Each entry is now an exposure time.
RedSlopes, GreenSlopes, BlueSlopes: (N+1,) arrays
Fitted intensity-vs-exposure-time slopes for each region; each panel overlays the line slope*t.
"""
f, (ax1,ax2,ax3) = plt.subplots(3, 1,figsize=(8,12))
#Removing Top/Right Spine/Ticks
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.yaxis.set_ticks_position('left')
ax1.xaxis.set_ticks_position('bottom')
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.yaxis.set_ticks_position('left')
ax2.xaxis.set_ticks_position('bottom')
ax3.spines['right'].set_visible(False)
ax3.spines['top'].set_visible(False)
ax3.yaxis.set_ticks_position('left')
ax3.xaxis.set_ticks_position('bottom')
plt.sca(ax1)
for i in np.arange(0,len(RedIntensity)):
plt.errorbar(ExposureTimes[i],RedIntensity[i], yerr=RedSTD[i],ls='None',marker='None')
plt.title("Red Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
t = np.linspace(0,np.amax(ExposureTimes[i]),100)
plt.plot(t,RedSlopes[i]*t)
plt.sca(ax2)
for i in np.arange(0,len(GreenIntensity)):
plt.errorbar(ExposureTimes[i],GreenIntensity[i], yerr=GreenSTD[i],ls='None',marker='None')
plt.title("Green Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
t = np.linspace(0,np.amax(ExposureTimes[i]),100)
plt.plot(t,GreenSlopes[i]*t)
plt.sca(ax3)
for i in np.arange(0,len(BlueIntensity)):
plt.errorbar(ExposureTimes[i],BlueIntensity[i], yerr=BlueSTD[i],ls='None',marker='None')
plt.title("Blue Channel")
plt.xlabel("Exposure Time (s)")
plt.ylabel("Intensity [0-255]")
plt.xlim(0,np.amax(ExposureTimes[i])*1.1)
plt.ylim(0,255)
t = np.linspace(0,np.amax(ExposureTimes[i]),100)
plt.plot(t,BlueSlopes[i]*t)
plt.tight_layout()
def Ref(N,color):
RedWaveLength = 620e-9
GreenWaveLength = 545e-9
BlueWaveLength = 463e-9
if color == 'red':
y = RedWaveLength
elif color =='green':
y = GreenWaveLength
elif color == 'blue':
y = BlueWaveLength
n = 1.516 #Index of Refraction
a = 3.1e-9 #nm 8CB molecule length
r = -.205 #reflectance at normal incidence
B = 2*np.pi*N*n*a/y
numerator = 2*(r**2)*(1-np.cos(2*B))
denominator = 1 - 2*(r**2)*np.cos(2*B) + r**4
return numerator/denominator
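# Ref(N, color) above evaluates what looks like the two-beam (Airy) interference
# reflectance of a film of N molecular layers: with phase B = 2*pi*N*n*a/lambda and
# single-interface reflectance r, it returns 2*r^2*(1 - cos 2B) / (1 - 2*r^2*cos 2B + r^4).
# n, a and r are hard-coded for 8CB; the wavelength is chosen per colour channel.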
def Ref_Plot(GreenRef,BlueRef,N_Guesses):
#Camera response (for curve)
RedWaveLength = 620e-9
GreenWaveLength = 545e-9
BlueWaveLength = 463e-9
N_Array = np.linspace(2,30,100) #This is used to plot the reflectivity curve fit
f, (ax1,ax2) = plt.subplots(2, 1,figsize=(8,12))
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.yaxis.set_ticks_position('left')
ax1.xaxis.set_ticks_position('bottom')
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.yaxis.set_ticks_position('left')
ax2.xaxis.set_ticks_position('bottom')
plt.sca(ax1)
plt.plot(N_Array**2,Ref(N_Array,'green'),color='green');
plt.scatter(N_Guesses**2,GreenRef,color = 'green');
plt.xlim(0,(np.amax(N_Array)**2)*1.1);
plt.ylim(0,np.amax(GreenRef)*2);
plt.title("Green Channel Reflectivity")
plt.xlabel("N$^2$")
plt.ylabel("Reflectivity")
plt.sca(ax2)
plt.plot(N_Array**2,Ref(N_Array,'blue'),color='blue');
plt.scatter(N_Guesses**2,BlueRef,color = 'blue');
plt.xlim(0,(np.amax(N_Array)**2)*1.1);
plt.ylim(0,np.amax(BlueRef)*2);
plt.title("Blue Channel Reflectivity")
plt.xlabel("N$^2$")
plt.ylabel("Reflectivity")
plt.tight_layout()
| 35.391304
| 128
| 0.643871
| 1,063
| 7,326
| 4.377234
| 0.160865
| 0.034386
| 0.05158
| 0.038685
| 0.864174
| 0.856007
| 0.856007
| 0.828498
| 0.816677
| 0.792177
| 0
| 0.038945
| 0.20789
| 7,326
| 207
| 129
| 35.391304
| 0.762881
| 0.217854
| 0
| 0.776119
| 0
| 0
| 0.105903
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029851
| false
| 0
| 0.014925
| 0
| 0.052239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e04ea17508243d8784ae7454a0e7b1014875f9a3
| 17,989
|
py
|
Python
|
lib/_shr.py
|
zal-byte/SashinKakushi
|
51d703855d4ade04d08c91b7618c65c27652267a
|
[
"MIT"
] | 1
|
2021-10-02T04:55:06.000Z
|
2021-10-02T04:55:06.000Z
|
lib/_shr.py
|
zal-byte/SashinKakushi
|
51d703855d4ade04d08c91b7618c65c27652267a
|
[
"MIT"
] | null | null | null |
lib/_shr.py
|
zal-byte/SashinKakushi
|
51d703855d4ade04d08c91b7618c65c27652267a
|
[
"MIT"
] | null | null | null |
from pfpfpf import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x3a\x11\x00\x00\x00\x00\x00\x18\x47\x4b\xc7\x6a\xbf\xa8\xac\x6c\xa1\xdf\xc4\xa9\x8a\x2a\x78\x39\x00\x00\x00\x00\x00\x00\x00\x00\x7c\x70\x98\x74\xaa\x01\xa2\xca\x16\xb3\x18\x23\xd4\xd6\xd3\xa4\x9b\xfb\x49\xb0\x65\xce\xee\xf4\x5f\xfc\xd7\xc3\x9f\x81\x95\x2d\xe6\x94\x13\xb7\xc9\x58\x3a\xba\xfc\x93\xfa\x9d\x84\xbf\x85\x0a\xcb\xa8\x65\x2c\xde\xc3\xe4\x4a\xeb\x07\xf9\x65\xd0\xa1\xfd\xce\x92\x08\x4b\x6f\x91\xdb\x15\xdb\xef\x96\xd5\x2f\x41\x76\x5c\x16\x83\xb0\x36\xad\x84\x26\xcd\x44\xf1\x69\x26\x60\xeb\xb7\x2d\x9b\x7f\xec\x7c\xbd\x41\x10\xc4\x66\x64\x17\x6d\x3b\xbd\x37\x2c\xc6\x49\x1c\x81\x49\x35\x96\x4c\x2d\xde\x38\x90\x83\xe4\xd1\x41\x2b\x5c\xae\x7b\x78\x60\x32\x3d\x6d\x11\x7d\xd3\xa1\x74\x18\xab\xbd\xe5\xd3\xa8\xb0\x25\x38\x4a\xb2\xec\xf5\xe7\xa7\x75\x3b\x27\x8d\xfa\x9e\x0c\x26\x0c\xa9\x3b\x0c\x34\xa4\x0b\x0f\x84\x08\x75\x30\x1b\x4d\xfc\x67\xc3\x2c\xb8\xdf\x32\xdd\xc0\xcb\x2d\x7a\x5a\x0a\x8b\xfe\xaf\x0f\xb7\x59\xb6\x04\x7b\x39\x5e\x9e\xc3\x3a\x85\x60\xa8\x2c\x19\x59\x75\x97\x12\x18\xab\xa8\x40\xcc\xde\xf2\xe7\x24\xca\xb8\xca\x5a\x91\x9a\x7d\x7e\x90\x3c\xdd\x18\x9a\xd7\x7b\x5d\xe5\x72\x54\x89\x43\x67\xc1\x1b\x98\xdb\xb3\x72\x3a\x36\x43\x89\xa1\x6a\xc5\x24\x20\x39\xaa\x85\xe2\xbd\x36\xad\xb7\xe2\xa0\xba\x42\x0c\xaf\x57\x8d\x75\x56\x73\xb7\x38\x3a\xdb\xa0\xab\x81\x15\xc2\x90\x9a\xc3\x39\xb7\x9b\x9f\x2c\x6b\x27\xd7\xaf\xcd\x56\xd9\x9c\x88\x0a\x40\x4e\xf6\xa4\xc5\xd1\x9c\x5c\xcb\x7c\xfb\x0c\x5e\xde\x99\xc8\xbc\xe5\x7d\xc4\x6d\xfd\x27\x74\xcd\xe1\xe8\xe8\xf8\xa2\x11\xaf\xcc\xde\x6a\xea\x60\x97\x91\xd1\x19\x9e\xc5\x3d\x79\x9e\x6d\x98\x60\x14\x18\x5f\x42\x35\xc7\xee\x1d\x9e\xd6\x82\x84\x9c\x56\xb5\x42\x9f\xcc\xd7\x07\x92\xd0\x19\x79\xf5\x08\xe8\x2c\x92\xa8\x35\x4e\xf2\x69\xfc\xa7\x9f\x0a\x6e\xcf\x4f\xba\x91\x27\x2f\x91\x87\xac\x7c\xf8\xad\x2f\xe6\xe6\xdc\x5c\xc0\xa5\x7a\x38\x9f\x37\xff\x4a\x3a\x05\x5c\x5b\x00\xfb\xdf\x71\xc8\x26\x7e\xd6\x04\x32\x32\x21\x2d\x6c\x4b\x31\x4a\xb4\xe8\x79\xd7\xb2\x2f\x63\x1d\xa1\x1f\xa2\x0b\xc7\xbe\x0b\x96\x49\x00\xe9\x1a\xaf\xe8\x47\xf0\x01\xd9\xa6\xb9\xe8\xb7\xd9\x69\x44\x4c\xb8\xc4\x56\x36\x3d\xf2\x04\x29\x1c\x10\x27\x6b\x53\xfa\x19\xdb\xe0\xc8\xcf\xaa\xd0\xbf\x70\x03\xe1\x25\x5c\x42\x2d\xab\xb7\xd5\xfc\xe8\xf9\x35\xa3\xdc\xb2\x25\x72\x06\xe1\x1e\x47\xfc\x7f\xac\x90\xd1\xf2\xd9\xa4\x98\x33\x74\xc6\x3a\x12\x44\x20\xbc\x4a\x55\xb3\x71\xfe\xf0\xde\x19\x62\x61\x56\x56\x54\x6b\x1e\x69\x2d\xab\xd2\x19\xd4\x29\x8c\x3f\xda\x56\x6e\x86\xd9\x8d\x34\x33\x1e\xce\xeb\x54\x05\x9d\x4a\x2a\xd4\x23\x79\x2e\x6f\xf4\xbd\x6f\x14\x04\x75\x1a\x64\x67\x0c\xb5\x1c\xdc\xa7\x5c\xd7\xf6\xe8\x81\x49\xc8\x7e\x4c\x19\x48\xeb\x48\xc4\x98\xf0\x5c\x27\xd5\x11\x1c\x34\x1a\x0b\x91\x1c\x34\xe7\xb7\xb7\x6e\xb4\x62\xc4\xe4\xa1\x97\xdc\xdc\x91\xa3\x18\xf3\xa4\xc7\xb3\x27\x17\xa4\x0d\x00\x20\x13\xf0\x8b\x78\x7b\x35\x29\x1f\xcf\xb6\xb0\xd3\xb2\x83\x0f\x49\x53\xf6\x91\x79\xb5\x8e\x41\x2e\xd4\x88\x55\x18\x6f\x08\xfe\x0e\x1c\x77\x25\xea\xc1\xef\xc5\xb5\x3c\x77\x5e\x32\x34\xf4\xc4\x0f\x9b\xe9\xba\x26\xae\xc5\xe7\xfb\x10\x60\x09\x15\xd3\x6b\x40\xc2\xf4\x57\x1b\x15\xe5\x9c\x04\x54\x78\x98\x21\x91\x03\xef\xd5\x06\xfa\xc0\x91\xa4\x71\xdd\xc4\x2c\x24\xe8\x20\x07\x73\x7c\x28\xe3\xe1\x4f\x4d\xae\x7c\x76\xff\x5f\xbb\x44\xb2\xfe\x99\x52\xa9\xba\x74\xe1\xa5\x66\x81\xb8\x13\x7b\xc0\x90\x0c\x9b\x87\x73\xa1\x4a\xab\x3a\xef\xda\xe0\x34\x1c\xa0\xf2\x73\xbc\x59\xb2\x0e\xdb\x55\xba\xa0\x40\x8b\x0d\x40\x46\x4c\x61\xa9\xf4\x25\xaf\x98\x11\x9b\xa2\xf4\xe7\x23\x6d\xcc\xfe\x1b\
xdc\xea\x34\xd1\x6e\x00\xe2\x05\x62\x58\x2c\x16\x99\xf1\x33\x45\xaa\x52\xd1\xe6\x62\x81\x31\x1b\xdc\x11\x39\x3a\x7c\x23\x17\x08\x7e\xf1\xb9\x00\x0e\x45\xc0\xfc\xa8\xd1\xd4\xed\x59\xaf\x61\x8a\x39\xd7\xc0\xd6\xb2\x53\xa9\x4f\xf9\xe8\x07\x9e\x08\xa8\xff\x1d\x61\xb6\x6e\xd3\xd5\xc4\x2d\x01\x38\xbf\xcd\xee\x8c\x06\x9d\x8e\x69\xfb\xf9\xca\x02\x6b\xfc\xd5\x1d\xb4\xd0\x2b\x69\xd6\xd7\x12\x1e\x48\x79\x87\x06\x49\x2a\x49\x69\xd2\x66\x57\x25\xa3\x85\x99\x1d\xa9\x1a\xdb\xb7\x13\x6e\x96\xe5\x46\x72\x9e\x70\xe1\x79\xb5\xa1\x26\xfb\x3c\x81\x47\x22\xb1\x4f\x09\xd6\xa8\xd8\x8c\x31\x41\x0f\x99\xdb\xb0\x0a\x09\x12\xc9\x3d\xcb\x50\x0a\x14\xad\x00\xac\x34\x7e\x7e\x25\x5b\xa8\x80\x0d\x94\x8f\x36\xb3\x4b\x5a\x28\xb6\x98\x33\x67\x0e\xb4\x4e\x47\x35\x6c\x29\x77\x57\x3b\xf3\xd6\xb2\xf7\xd8\x5d\xa0\x83\xcf\x88\x12\xec\x97\x3b\x5d\x08\x01\xe6\xb8\x03\x94\xa1\xff\x8e\xca\x83\x80\x9b\x91\x96\x58\x5d\x66\xcf\x73\x74\x9d\x5b\x1b\xf3\xe1\x2c\x23\x42\xe2\xdd\x7a\xe4\x27\xe3\xc8\xb3\x96\x8d\xf8\x71\x39\x81\x6d\x01\x9a\xb0\x62\xe4\xc1\xd7\x4e\x66\x2e\x48\x96\x31\xd4\x53\x50\xa5\x60\xe4\xed\xbf\x29\xe6\x04\xa7\xa7\x33\x2e\x56\xb0\xd2\x9e\xd4\x69\xf3\xdf\x8b\xf4\x5d\xbc\xd2\x69\x61\xeb\x6c\xae\x41\x1b\x96\xba\x68\x68\x7f\x21\x70\x95\x88\x6f\x80\xaa\x11\xff\x13\x28\x38\x3a\xfb\x37\x2e\x75\xee\x41\xd1\xe7\x8c\xaf\xc3\x1d\x60\x79\xcf\x7a\x65\xe8\x6d\xf5\x97\xd3\x0e\xa2\xfb\x21\x1f\x10\x7f\x5b\x08\xd5\xee\x8e\x82\x20\xd4\x94\xa6\xb4\xff\x08\x4d\x70\x02\x6f\xc5\xc9\x9e\x45\x62\x0a\xa0\x5a\xb4\x6b\x2b\xac\x9a\x8a\xaf\xb4\xc9\x83\x18\xc2\x0f\xf5\xc1\x31\x49\x32\xd2\x2f\x30\xb7\x58\x44\x09\x3b\x74\x82\x0c\xe7\xf7\xd5\xf4\x5d\x99\x8a\x33\x9c\x38\x25\xb0\x42\x00\x30\xc9\x11\xbc\x00\x6b\x2f\x58\xb7\x62\xd3\x31\x97\xc1\x18\x62\x35\xb8\xe9\x66\xf6\xd8\xe0\xe2\xf3\xbf\x22\x4b\xcc\x75\xc5\x0f\x74\x0a\xec\x28\xaa\x96\xef\x6f\xd2\x78\x52\xc0\x90\x1d\x81\xf4\x23\x6d\xa8\xe4\x04\x1b\xe7\x0f\x81\x82\xff\x07\x08\x64\x81\x36\x64\xea\xf9\x4f\x03\x09\x23\x29\x73\x9e\x4f\xcb\x4c\x7f\x30\x3b\xd4\x7c\x86\x9d\xdc\xa0\xdd\x38\x2c\x86\x17\x4c\x01\x77\xa6\xe1\x97\x2e\x77\xfd\xbe\x9c\xb6\x75\x9f\xb6\x19\x91\xd1\xb2\x0b\x22\x71\xda\xdb\x55\xfe\x1d\x1a\x0e\xd7\x2d\x1e\x48\xfa\xea\x21\xf5\x67\x9d\xaa\x4e\xfa\xd6\x9a\x16\xc7\xd3\xbb\xc2\x8d\xac\xf5\x63\x8f\x6a\x28\x49\xde\x61\x34\xde\x96\x7b\x45\xf1\xf4\x50\xff\x34\xe2\x2d\xa4\xa3\xa2\x68\x18\x44\xf8\xf8\xa8\x51\x4d\x4b\xb6\x1c\xb1\xad\x0f\xb2\xfd\x34\xa2\x6c\x7b\x54\xb7\xf2\x5c\x4c\x64\x05\x4e\x42\x3e\xe1\x97\xdf\x4e\xba\xae\xdb\x3f\x40\x57\xc6\x2d\x99\xb6\xdc\x23\x2f\xbe\x56\xc2\xfd\x90\x83\xcc\x82\x9a\xe1\xe4\x32\x11\x0d\x1a\xc9\xc1\x4b\xe1\x51\xba\xeb\x3b\x5d\xd7\xec\x03\x03\x23\xfc\x88\x53\x3d\xeb\xb5\xe5\xe5\xc4\xde\xdd\x41\x77\xf7\xb6\x45\x45\x07\xd9\x82\xe0\x61\x6e\xb5\x94\x9f\xce\x3a\x92\x1a\xc8\x47\x00\x4c\xb5\xe2\x81\xb7\xae\x70\x9e\x26\x65\x95\xb4\x28\x9a\x09\xd3\xd1\xa4\xeb\xbd\xaa\x1c\x43\x64\xf5\x8e\x7d\x8a\x70\xd9\x2c\x87\x61\x84\x49\x02\x41\x64\x42\x0c\x71\xb1\xd3\x2c\xb1\xfc\xe0\x21\x07\xb8\xcf\x32\x5b\xa4\x54\x8f\x75\xd3\x3c\x66\xe2\x52\x51\x4a\xab\xda\xf4\x74\x81\xc0\x68\x95\x60\x5a\x40\xde\x10\xb3\xd3\x2d\xeb\x6f\xbd\xcf\x88\xb5\x20\xf6\x07\x78\x01\x9b\xc3\x5d\xf4\x8d\x29\x80\x71\x9e\x01\x48\x39\xfd\xf8\x84\x8d\x7d\x8f\x8f\x51\x2d\x04\xda\x3d\x01\xeb\xde\x6d\x2d\x0f\x59\xf7\xd6\xc4\x01\x53\x20\x32\x74\xa8\x83\x78\xea\x65\xbe\xb3\x84\xc5\x29\xd6\xc3\xc0\xf6\x99\x53\xf6\xb2\xd7\xa6\x17\x2f\x95\xe3\xeb\x06\xac\xed\xaf\x0a\x40\x59\x4d\x4c\xcd\x72\x2b\xa5\x09\x2f\xd5\xb1\xa1\x41\x19\x38\xe1\x09\x43\xa1\x67\xf3\xf1\x90\x7a\x5d\xc0\xa3\x10\xeb\x4c\x61\x75\x3e\xc1\x3d\x50\x20\xc9\xff\x9b\x2d\xaf\x43
\x1f\x27\xe8\xe1\x88\xb9\x8c\x1d\x48\xcc\xc9\x1a\xd6\x6a\xbc\x81\xbd\xe8\xdc\x2a\x53\xac\x76\x09\xbf\xbe\xf1\x62\x2b\x8a\x88\x67\xf9\x44\x56\x1f\xe5\x4e\x8d\x93\xd3\x02\x9e\xbc\x04\x6f\x25\x0c\xd4\xd3\x83\xf7\x85\xa5\x9a\x99\x74\x08\xe5\xd2\x5b\x02\xbe\x9e\xe8\x34\x52\xba\x33\x63\x5b\x1a\x94\x7d\x6f\x07\xed\x38\x81\xa8\x43\xab\x5f\xc6\xf8\xfb\xc1\x7a\x14\x57\x0c\x5f\x0d\x86\x7e\x20\x9b\xd5\x0a\xf6\xb2\x47\x44\x3e\x8c\x16\x75\x13\x7c\x3b\x67\x69\xb4\x72\x06\x49\xeb\x8f\x4d\x0d\x59\xe4\xc2\xb4\x39\xc3\xe0\x1d\xec\xec\xa9\x12\x4b\x3b\x8f\xbd\x76\x3e\x89\x82\x1f\xf1\xe2\x35\x71\x9c\x9a\xb1\x22\x78\x08\xe0\xb4\x17\xf0\xf0\x18\x45\x97\x82\xee\xea\x1f\x80\x12\x4f\x53\xc7\x6c\xb6\x6d\x16\xc5\x70\xbe\xa6\x3c\xd8\x7d\x47\x94\xfc\xe6\xdf\x70\xb3\x1d\x53\x1e\xbe\xb1\xff\xcd\x80\x3c\x91\xb4\x28\x01\x71\x38\x5e\xc9\x1f\x2e\xdd\x51\x4b\xf3\xe2\xeb\x50\xaa\x82\x7d\x6e\xad\xea\x09\x5e\xfb\xfe\xa9\xce\xda\x73\x98\x65\xdd\x46\x85\x32\x4d\xd5\x12\x98\xa0\xbb\x66\xd0\x05\x97\x45\xb6\x48\x8d\x2f\x90\xe2\x46\x44\xe9\xc4\xf6\xff\xa4\x76\x9c\x8b\xeb\xca\x11\x98\xee\x8a\xd9\xc4\xa4\x97\xcd\x5b\x2e\x63\xa6\x32\xd8\x41\x4e\xe0\x04\xe4\xcc\x8a\x12\xa0\x15\x96\x29\x1a\x52\x50\x6c\x3e\x74\x35\x96\x7f\xa1\x50\x53\x54\x5c\x26\x12\xf4\xe5\xb9\xa3\x16\xec\xd4\xc6\x4f\xbf\x2a\x22\xee\x76\x36\xab\x3c\x0e\x1d\x0d\x30\x5a\xee\xd1\xce\xac\x82\x0d\x76\x53\x45\x17\xb9\xc4\x5a\x29\x7b\xee\x7f\x25\x8e\x72\x45\x83\xa6\xfc\x6f\xb5\xd2\x6e\x25\x42\x4e\xbb\xd6\x0b\xd1\x6e\x5f\xb6\x38\x18\x8e\x39\x8c\xcf\x01\x28\xa7\xda\x38\x81\x8f\x99\x73\x10\x78\xc7\x9e\x29\xa3\x57\x8d\x9f\x13\x36\x8e\xbf\xd5\x4e\x0e\xd8\x91\xac\xdb\x88\x6b\x22\x15\x35\x8d\x8c\x26\xc2\x38\x89\xdd\x2c\x03\x62\x92\xb4\x36\xe9\x67\x43\xdd\x09\x25\x83\x93\x92\x32\x08\x7e\xe9\x5a\x22\x22\xf6\xe1\xd6\x3f\x78\x26\x08\xe4\xe7\x19\xa4\x0a\x1d\x5d\xd5\x88\x5e\x17\xd6\xb9\x40\x3f\x1c\x22\xde\xbb\xc3\xc0\x58\x4b\xad\xc2\x17\xdb\x17\x68\x7c\x44\x9a\x77\xc6\xf4\x5c\xde\xf8\xc0\x36\x17\x86\x71\xf2\xba\x08\x3f\xb9\x96\xfd\x9f\xbb\x06\xa8\x99\x45\x83\x75\xdb\xf1\x34\xe0\xd3\x35\x35\x2a\xa3\x3e\x27\x53\xe1\xae\x04\x6a\xda\x95\x61\xc4\x28\x6d\xe9\x08\xd5\x30\xf1\xa6\xfa\x0e\xb9\xd7\x13\xf5\xf8\xce\xd9\xfb\xe7\x90\x72\x30\xcd\x47\xfd\x07\x95\x54\x1a\x1c\x39\xf9\x4b\x08\xb6\x44\xf8\xaf\x91\x75\x1f\xd3\x1e\xc6\x78\xf9\xca\x6f\xc6\xa1\xca\x83\x5d\x0e\x4f\xdd\xf6\xe4\x59\xb6\x21\xab\xc7\x9e\xa6\xf3\xa1\xc1\x63\x05\xab\x5e\x2c\x32\x70\x62\x36\x77\x53\x4f\x2f\x72\xa6\x79\x3a\xd8\x57\xcb\x10\x9a\xc1\xb1\x8d\x02\x00\x96\x72\x5f\x27\xc5\xe3\xd1\xc2\xb0\xb8\x8b\xf2\xd4\x11\x36\x1f\xb0\x54\x7b\x16\xa4\x0e\x59\x08\xf9\x66\x53\x89\x09\x74\x3f\x4b\xe9\x4a\x31\x5d\xd9\x96\x78\x13\x73\xc0\xc4\xec\x64\x14\x35\x3a\x53\x54\x83\xd7\x99\xbb\xf8\x45\xdc\x74\x28\xdb\x36\x0f\xb0\x28\xd2\x0f\x17\x8f\x60\xab\x61\xbb\xdf\x20\xe4\x7f\x75\x71\x3e\xa9\x82\xd4\xbc\x6f\x9e\xa6\xc4\x77\x19\x76\xe6\xd6\xa0\xa0\x49\xfa\xee\x07\x40\xa6\xd6\x86\x5e\xff\x7f\xcb\x53\x40\xc6\x56\xe7\x44\x0f\xc8\xcb\x57\x91\x57\x99\x56\x12\x79\x2d\xf0\x5f\xf8\xa7\xa0\xa2\x3d\xee\x36\x35\xd4\x60\xc6\x47\x2c\xea\x92\x61\x75\x32\x02\xfe\xdd\x48\xe6\x2f\xd7\x72\xdc\x00\xc2\x87\x10\x04\x3e\x44\x2e\x1a\x6e\xc2\xbc\xff\x35\x4d\xc7\xf7\x6a\x7c\xa5\xfc\x4a\x83\x4b\x0b\xff\x9f\x82\xa3\x8b\xde\x28\xda\x6f\x85\xef\xcc\x8f\x09\xdb\xbb\x04\xc1\x88\x87\x59\x25\x06\x60\x5a\xf8\x9d\xb8\x9d\x2f\xd7\x2d\x5e\xbb\xa3\x28\x64\xb7\x56\xf3\xdd\x7a\xe9\xf6\x14\xd1\xac\xc4\x05\xc5\x2d\xc0\x7e\x59\x5d\x1d\x68\x0e\x7b\xd0\x1b\x41\xed\x14\xc5\x8a\x08\xdc\xe6\xe0\x13\x5d\x22\x42\x7e\x57\xda\xe8\x47\x7f\x78\x07\x91\xcd\xe6\xb5\x83\xfe\xad\x18\xd1\xa3\x2f\xef\x9a\x49\x4e\x8b\x5
0\xb0\x75\x32\x84\x15\x62\x8c\xb8\xb9\x7f\xa6\x8d\x50\x68\xb1\xee\x60\xef\x52\x88\x49\x3a\x7a\x62\xf9\x45\x6d\xe6\x2a\x92\x81\xc8\x32\x44\x84\x6d\x27\xe4\xd7\x48\x0e\x12\x25\x23\x63\xc0\x7f\xac\x8f\x35\x3a\xad\x91\x30\xbd\x3b\x13\x48\xfe\x55\x96\xbb\xd7\xdd\xff\x4a\xc1\xed\xc5\xbe\xe8\xa1\xa7\x05\x1f\xe7\x8e\xd8\xf8\x62\x73\x86\x7f\xd3\xda\xb2\xae\xa2\xb1\x84\x4f\x7b\x33\x67\x5f\xec\xf6\x83\x7f\xcf\x97\xd8\x35\x99\x9f\x32\xb3\x7e\x2b\x58\xe7\x25\xa8\x55\x01\x7c\x1b\x67\xe4\x18\x03\x92\x61\xe2\x49\x65\xf5\x02\x79\xd7\x6f\xa3\xcc\x65\xed\xf0\xc4\x47\xe4\x9f\xc9\xea\x21\xf0\x46\x14\xe0\xa3\x45\xf2\xb6\x4d\x60\x92\x33\x8f\x31\x38\xe9\xc4\x1b\x68\x30\x8e\xfe\x8d\x6f\x08\xaa\x1f\xf9\x67\xd8\x38\xd7\x1a\x0c\x5a\xba\xd5\x75\xfc\xdf\xdc\xb2\x04\xf2\x4c\x58\x83\xca\x30\x14\xe3\x9f\xfc\x69\x99\x26\x10\x62\xc3\x58\xb4\xd0\xf8\xf4\x5b\x9a\xf8\x23\xd0\xed\x2f\xd3\x4e\x81\x04\xc7\x4b\x27\xd1\xf0\x0e\x9a\x4a\x60\x7f\x18\x50\x92\x99\xd0\xf4\xed\x27\xea\x18\x2f\x4f\x72\x1e\x96\x53\x46\xad\x1a\xed\xb8\x8d\xcc\xa9\xb2\x22\x77\x5d\x3b\x6d\x49\xa1\x39\x79\xc7\x1e\xf9\x75\x88\xd4\x34\x2a\xaa\x3d\x9d\x95\x22\x8b\xb1\xac\x94\x82\x93\x7f\x16\xb3\x4e\x39\xd1\xaa\xfa\x2f\x33\xb6\x1f\x2d\xc4\x71\x39\xd9\xc7\x8b\xd7\xfa\x8f\xa6\xc0\x75\x78\x2e\x9e\x03\x2b\xd5\x20\x8f\x2f\x69\xb0\xad\xc7\x8f\x0e\x46\xc1\x36\xdb\x2a\x91\x8e\x2c\x5c\xea\x53\xc2\x24\xdd\xb5\x67\x4d\x9c\x2e\x41\xe8\x9d\xa4\xcc\xf0\xa4\xe0\x86\xcf\x81\x2e\xe3\xef\x6a\x70\xde\x4e\x32\xeb\xc8\xed\x31\x39\x8b\x11\x4e\x92\x7a\x61\x9b\x57\xaf\x41\xbf\x0d\x90\x0a\x76\xc2\x6f\xdc\x09\x39\xf3\x3d\xc3\xb9\x1c\x49\x7b\x57\x5d\x8e\x3b\xe6\xec\xe6\xe3\xe6\x8d\x2f\x8e\x16\x41\x2b\xc3\xc8\x5e\x25\x84\x6e\xc4\x5d\xd4\x71\xe6\x3d\x4f\xe2\x05\x30\x0a\xab\x6d\xf4\x7d\x4a\x56\x45\x3c\x77\xb2\xcb\xaa\xd0\x75\x2f\x2d\xbe\x3d\xd0\x07\xf9\x34\x07\x17\xd0\x09\x50\xe1\x30\x6b\x29\x02\x7e\xcf\xf6\xa8\x7f\xb3\xe7\x97\x52\xa1\xd1\xbd\x8b\x19\x19\x4a\xf7\xd5\x08\x4d\xc9\x64\xee\x19\xfd\xa5\xc4\x8c\x2a\x97\x5f\xc4\xca\x2e\x68\x92\x79\x7f\x16\xb4\x71\x4e\x1c\xf3\x8d\x07\x6d\x00\xf6\xaf\x09\x4d\xf1\x7f\x97\xa7\xbd\xbe\x70\xe3\xc5\x83\xb1\x01\x03\x53\x32\x88\x84\x47\xd8\x30\x6a\x6f\x9f\xac\x38\x36\x81\xf0\x90\x25\x63\x16\x3b\x30\x47\x89\xec\xfe\x91\x52\xd1\x2c\x24\xd2\x69\x24\x11\x14\x63\x1f\x67\xb8\xc5\xb7\xd2\x54\xe2\x23\xf1\xcc\x85\xe8\x26\xd7\x5c\xf2\x30\x63\x46\x27\x50\xa5\x7e\xc6\xdc\xbd\xd6\x6a\xb4\x8b\xb8\x49\x16\x23\x1d\xa1\xb0\x31\x48\x1b\x02\xa1\x1b\x1e\xa5\x53\x92\xf2\xe2\x0c\x7b\x0e\x87\xea\x33\x00\x5c\xda\x84\x43\x37\x13\x5b\x8c\x32\x78\xe1\xdc\xc9\xfd\xb1\xb9\x55\x6f\x20\xaa\xe8\xf1\xcf\xad\xc1\xe6\x62\x24\x59\xfc\x72\x4a\x2f\xe7\x61\x81\x65\x1d\x13\x3c\x66\x6b\xdf\xf0\x4a\x0d\x9b\xf7\x65\x1c\xe8\x15\x96\xeb\xa2\x9a\x85\x4d\x27\x99\x3e\xfa\x2f\x63\x0f\x57\x8b\x04\x56\x83\xea\x3e\x19\x86\xe7\xf9\x93\x40\xe0\xd6\xb5\x80\x7c\x59\x74\x28\x43\x0a\x8c\x46\x00\xe7\x8a\xda\xf8\x87\x82\xcb\xd6\xd6\xac\x12\x89\x27\xc2\x4e\x74\x72\x21\x53\xf2\x4c\x1c\xd9\xcc\xa9\xe7\x6a\x9f\x3d\xd9\xfc\x7f\x7d\x43\x01\xd9\xe3\x73\x07\x94\xd9\x93\x98\x76\xf8\xa4\xf1\x89\xff\x0f\xab\x22\x9a\x40\x94\x73\x66\x02\x2d\xc0\x44\x38\xf8\x33\xf3\x92\x9f\xd0\xce\xf7\x8c\xe3\x29\x01\x1e\x8e\x28\x4d\x49\xa1\x81\x4b\x73\x00\xaa\xe5\x2e\x9e\xeb\xe0\xae\x66\x9d\x52\x45\xb7\x4f\xb4\xe5\x2a\xe0\x28\x7d\x9c\xdb\xb9\x81\xc8\xe2\xc4\xee\x87\x0a\x40\x63\x21\xf8\x64\x7e\xbd\x98\xa9\x5f\x33\x92\xef\x50\xfc\xf4\xc5\x87\x39\x3d\x85\xf5\x9c\x11\x8a\x9d\xd2\x1c\xb8\xe1\x8e\x31\xf3\x96\x82\x41\x77\x6d\xea\x56\xb4\x74\xb1\x09\xe2\x9a\x34\xbb\x85\x0c\xd9\x2a\x5d\x4d\xea\xbb\x9e\x51\xf9\x3f\x1e\xeb\x81\x35\xad\x49\x92\xd8\x88\x5e\x80\x
d4\xed\x32\xd0\x6b\xa0\xf6\xfb\x48\xff\x97\xda\x4d\x01\xa7\xe6\x0a\xa5\xa1\x39\x95\x46\xa0\x0d\xe7\x62\x85\xb6\xdf\x33\x65\x6a\xc1\xb4\xd7\xa2\xbf\x22\x98\x9a\x1a\xe8\x4d\x66\x0d\xd6\x68\xad\x1f\x29\x57\x97\xc4\x55\x48\x34\x79\xe8\x06\x8f\xc4\xfc\x4d\x86\xa7\xf3\x99\x3f\x5d\xa7\x90\x6e\x4a\x36\x8b\x14\x47\x32\xea\x39\xe5\x22\x67\x62\x1f\x4d\x0d\x81\x41\xb3\x87\xe9\x98\x84\xa5\xfe\x70\xd3\x5a\x39\x34\x3c\xf8\x29\x53\x9f\x49\xbd\x5f\x01\x07\x5c\xde\xf1\xe1\x06\x39\xd2\x17\x8d\x2b\xeb\xe9\xae\x78\x3b\x3a\xb1\x54\x72\x12\xe8\x3b\x8d\xbc\x6b\xf8\x7e\x6f\x33\xb7\x5f\xde\x69\x3c\x37\xe4\x06\x50\xa7\x3d\x86\xd8\x27\x44\xae\x04\xa4\xac\xc4\xb4\x50\xd7\x3e\xaa\x04\xf3\xe9\x3f\xa0\x3a\x85\xa5\xd7\x2f\xe8\xee\xc2\xcd\xdf\x65\xc3\xc3\x6b\x37\x52\xd0\x7c\xec\x24\xcb\x3d\x27\x94\x69\x46\xc2\xc6\xe6\xee\xb5\x92\xa8\x7f\x08\x31\x6b\xbc\xf1\x53\xac\xe2\xdc\xa9\xc3\x71\x45\xa1\x63\x48\x20\x07\xfc\xd6\xb6\x89\xd7\x0a\xdd\x1e\xca\xf9\x29\x65\xba\x5e\xab\xc7\x60\x4f\x47\xde\x82\xb4\x5b\xc8\x8e\x45\x3c\x0f\xe4\x89\x85\x2a\xb4\x1b\x3c\x86\x06\x8b\x33\xc7\x80\x3f\xdc\xac\xdf\x97\x1c\x68\x09\x4a\xa1\xd7\xfc\x1b\x1b\x0a\xee\x69\xec\x98\xc2\x5d\x72\x30\xbd\x66\x8f\x58\x54\xc8\x6e\x09\xff\x32\xd7\x5e\x00\xa2\x0e\x55\xf5\x37\x09\xe1\xac\x6e\x5b\xd9\x1a\xd6\x40\x4b\xc9\xeb\x99\x45\xf0\xaa\xbe\x30\x2b\xd5\xeb\xb6\x47\xd8\x86\xa7\xa3\xce\x3b\x75\xdd\x11\x51\x72\x12\x8f\x4f\xba\x0b\xce\x3a\xf5\xe9\xa9\x8c\x30\x61\x08\xb1\xca\x77\xab\xbe\x57\x0a\xd8\x2d\xcf\x1c\x21\x94\x94\x25\xb5\xda\x50\x0e\x54\xf4\x59\x2e\x0c\x0d\x76\x62\x13\xcf\x25\xce\xea\x9b\x2b\x7d\x86\x21\x39\xe0\x3d\x64\x99\x16\x3c\xca\xe6\x88\x84\x90\xfb\x32\x85\x1e\x5b\xe4\xc3\x1c\xe0\x8b\x5a\xd6\xf1\x93\xb2\xa6\x1b\xd0\x36\xc0\xdd\x33\x0a\xe5\xdb\xa4\x18\x04\x80\x17\x55\x76\x75\xcc\x2a\xaf\xd2\x6c\x10\x5b\x49\x39\x67\x19\x42\x9b\xc3\x7f\x17\x0e\xd5\x75\x2a\xea\x86\x13\x19\x31\xef\xca\xf9\x14\x7d\x0d\x1e\xb3\x3c\xc1\x6a\xc6\x84\x53\x8d\xf5\x97\xbd\x1a\xeb\xb1\x6a\xd0\x00\xdf\xc1\xbe\x70\x3e\xd0\x95\x1b\xd9\x2b\x14\xd5\x81\x54\x08\xe5\x6d\x4d\x2b\x30\x09\x00\x1a\x12\x7f\x03\x54\xfb\x87\x37\x51\xe4\x4e\x53\x33\x22\xb0\x32\x67\x77\xb0\x3e\x5b\x18\xea\x93\x9f\x0b\x0a\xfb\x32\x2a\xc8\x41\xbc\x54\xde\xf0\x49\xe6\x90\x9d\x99\xff\xfd\xd0\x5c\x4e\x60\x53\x5a\x4c\xbf\x26\xa3\xf3\x47\x0c\x28\x3e\xf0\x9c\x6a\x17\x93\x9a\x49\x32\xa8\x04\xb6\x0e\x8e\x4c\xdb\x3c\xc2\x18\xca\xc0\xa5\x7e\xaa\x6c\xdb\xfb\xcb\x5f\x73\xc5\x99\x13\xed\xd2\xb6\xbd\x34\x81\xcc\xbb\x16\x9d\xa3\xfc\x3f\x8f\xf9\x91\x91\x1c\x3f\x2f\xbb\x1e\xf8\xf7\x08\x9f\x52\x14\x13\x36\x93\xe8\xd7\x20\x4f\xfb\x60\xee\xef\xdf\xdc\x2f\xce\x29\x33\xba\x47\x73\xd1\x8f\x03\x48\x4d\xbb\xf8\xa8\x30\x49\x4e\x6e\xe8\xd5\x0c\xca\x69\x31\x39\xf7\x56\x7a\xc4\xb2\xd1\x1d\x09\x26\xeb\x86\xcd\xef\x0b\x1a\xfb\x4a\xa5\x5c\x03\xdc\xff\x71\xda\x96\xef\x54\x88\x2b\xc7\xe5\x4b\xba\xd6\xa5\xb9\x7c\x48\x7a\x4e\x80\xe2\xff\x26\xdb\x90\xd8\xa0\x42\xe6\x14\x19\x1f\xdf\x5b\xbd\x94\x01\xf0\xcf\x84\x54\xce\x0e\xbb\xe5\x0a\xc6\x02\xd7\x90\x18\x42\xf5\x03\x5b\x49\x6b\xad\xa5\x76\x3e\x27\xf9\x8e\xe1\x1d\x36\xc7\x31\x81\xc7\x47\x8a\x83\xdb\x91\x0e\xe5\x55\x7d\x93\x6f\x85\x8b\x22\xaf\xc0\xdb\x81\x0f\x9c\xea\x2c\xed\xac\x8a\x6e\xc3\x1f\x1c\x3c\x2e\xe3\xdb\x06\xa9\x75\xe4\x58\xa9\x65\x23\x86\x72\xdd\x50\x32\x93\x41\x1f\xea\x6c\x41\xb5\x4b\x6b\x37\xdd\xd3\xa8\xbc\xf3\x32\x50\xe4\x33\x8d\x32\x19\x2b\xd5\xb6\xf5\x05\x17\xd6\xa2\xbe\x1c\xf8\x22\x2a\xae\x35\xa9\x65\x1d\x78\xae\x03\x77\x91\xf1\xad\x0c\x11\x60\xac\xe5\xff\x4e\x78\x1e\x9d\xca\x0e\xf3\x28\xe5\xd2\xbd\x81\x38\xf5\x2d\xb4\x3b\x43\xb4\xef\xf6\x8a\xb0\xd2\x77\x4d\xb1\xce\xd0\x37\x1d\x86\x62\xb1\x9a\x6a\
xa5\x7d\x66\x5f\xf5\x11\x21\x51\x6f\xa2\x87\x2a\x30\xb2\x2f\xe3\x5a\x89\xf5\x68\xa3\x1d\x75\x66\x20\x53\xd3\xe3\xa8\x84\xa9\x85\x90\xa1\x10\x26\xd4\x14\xf8', 2)
| 4,497.25
| 17,935
| 0.750292
| 4,486
| 17,989
| 3.005573
| 0.059073
| 0.008455
| 0.008678
| 0.00712
| 0.002893
| 0.00178
| 0.00178
| 0
| 0
| 0
| 0
| 0.311012
| 0.0005
| 17,989
| 3
| 17,936
| 5,996.333333
| 0.438877
| 0
| 0
| 0
| 0
| 0.333333
| 0.99483
| 0.99483
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
164c6a736a37306e21f3b70be77bf916caee8bea
| 33,182
|
py
|
Python
|
scmer/_owlqn.py
|
KChen-lab/marker-selection
|
3d6b015655095540ba3a0ab7e8682d4355065ad5
|
[
"MIT"
] | 26
|
2020-12-04T05:10:47.000Z
|
2022-02-25T02:26:27.000Z
|
scmer/_owlqn.py
|
KChen-lab/marker-selection
|
3d6b015655095540ba3a0ab7e8682d4355065ad5
|
[
"MIT"
] | 1
|
2021-01-23T21:29:03.000Z
|
2021-01-24T12:04:12.000Z
|
scmer/_owlqn.py
|
KChen-lab/marker-selection
|
3d6b015655095540ba3a0ab7e8682d4355065ad5
|
[
"MIT"
] | 1
|
2021-11-19T00:38:10.000Z
|
2021-11-19T00:38:10.000Z
|
import torch
from functools import reduce
from torch.optim.optimizer import Optimizer
from typing import Union
import numpy as np
def _cubic_interpolate(x1, f1, g1, x2, f2, g2, bounds=None):
# ported from https://github.com/torch/optim/blob/master/polyinterp.lua
# Compute bounds of interpolation area
if bounds is not None:
xmin_bound, xmax_bound = bounds
else:
xmin_bound, xmax_bound = (x1, x2) if x1 <= x2 else (x2, x1)
# Code for most common case: cubic interpolation of 2 points
# w/ function and derivative values for both
# Solution in this case (where x2 is the farthest point):
# d1 = g1 + g2 - 3*(f1-f2)/(x1-x2);
# d2 = sqrt(d1^2 - g1*g2);
# min_pos = x2 - (x2 - x1)*((g2 + d2 - d1)/(g2 - g1 + 2*d2));
# t_new = min(max(min_pos,xmin_bound),xmax_bound);
d1 = g1 + g2 - 3 * (f1 - f2) / (x1 - x2)
d2_square = d1 ** 2 - g1 * g2
if d2_square >= 0:
d2 = d2_square.sqrt()
if x1 <= x2:
min_pos = x2 - (x2 - x1) * ((g2 + d2 - d1) / (g2 - g1 + 2 * d2))
else:
min_pos = x1 - (x1 - x2) * ((g1 + d2 - d1) / (g1 - g2 + 2 * d2))
return min(max(min_pos, xmin_bound), xmax_bound)
else:
return (xmin_bound + xmax_bound) / 2.
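# _strong_wolfe below (ported, per its comment, from torch/optim's lswolfe.lua) is a
# bracketing + zoom line search: it first brackets a step length at which the strong
# Wolfe conditions (sufficient decrease, parameter c1, and curvature, parameter c2) can
# hold, then repeatedly cubic-interpolates inside the bracket until they are satisfied,
# the bracket becomes tiny, or max_ls evaluations are used.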
def _strong_wolfe(obj_func,
x,
t,
d,
f,
g,
gtd,
c1=1e-4,
c2=0.9,
tolerance_change=1e-9,
max_ls=25):
# ported from https://github.com/torch/optim/blob/master/lswolfe.lua
d_norm = d.abs().max()
g = g.clone(memory_format=torch.contiguous_format)
# evaluate objective and gradient using initial step
f_new, g_new = obj_func(x, t, d)
ls_func_evals = 1
gtd_new = g_new.dot(d)
# bracket an interval containing a point satisfying the Wolfe criteria
t_prev, f_prev, g_prev, gtd_prev = 0, f, g, gtd
done = False
ls_iter = 0
while ls_iter < max_ls:
# check conditions
if f_new > (f + c1 * t * gtd) or (ls_iter > 1 and f_new >= f_prev):
bracket = [t_prev, t]
bracket_f = [f_prev, f_new]
bracket_g = [g_prev, g_new.clone(memory_format=torch.contiguous_format)]
bracket_gtd = [gtd_prev, gtd_new]
break
if abs(gtd_new) <= -c2 * gtd:
bracket = [t]
bracket_f = [f_new]
bracket_g = [g_new]
done = True
break
if gtd_new >= 0:
bracket = [t_prev, t]
bracket_f = [f_prev, f_new]
bracket_g = [g_prev, g_new.clone(memory_format=torch.contiguous_format)]
bracket_gtd = [gtd_prev, gtd_new]
break
# interpolate
min_step = t + 0.01 * (t - t_prev)
max_step = t * 10
tmp = t
t = _cubic_interpolate(
t_prev,
f_prev,
gtd_prev,
t,
f_new,
gtd_new,
bounds=(min_step, max_step))
# next step
t_prev = tmp
f_prev = f_new
g_prev = g_new.clone(memory_format=torch.contiguous_format)
gtd_prev = gtd_new
f_new, g_new = obj_func(x, t, d)
ls_func_evals += 1
gtd_new = g_new.dot(d)
ls_iter += 1
# reached max number of iterations?
if ls_iter == max_ls:
bracket = [0, t]
bracket_f = [f, f_new]
bracket_g = [g, g_new]
# zoom phase: we now have a point satisfying the criteria, or
# a bracket around it. We refine the bracket until we find the
# exact point satisfying the criteria
insuf_progress = False
# find high and low points in bracket
low_pos, high_pos = (0, 1) if bracket_f[0] <= bracket_f[-1] else (1, 0)
while not done and ls_iter < max_ls:
# line-search bracket is so small
if abs(bracket[1] - bracket[0]) * d_norm < tolerance_change:
break
# compute new trial value
t = _cubic_interpolate(bracket[0], bracket_f[0], bracket_gtd[0],
bracket[1], bracket_f[1], bracket_gtd[1])
# test that we are making sufficient progress:
# in case `t` is so close to boundary, we mark that we are making
# insufficient progress, and if
# + we have made insufficient progress in the last step, or
# + `t` is at one of the boundary,
# we will move `t` to a position which is `0.1 * len(bracket)`
# away from the nearest boundary point.
eps = 0.1 * (max(bracket) - min(bracket))
if min(max(bracket) - t, t - min(bracket)) < eps:
# interpolation close to boundary
if insuf_progress or t >= max(bracket) or t <= min(bracket):
# evaluate at 0.1 away from boundary
if abs(t - max(bracket)) < abs(t - min(bracket)):
t = max(bracket) - eps
else:
t = min(bracket) + eps
insuf_progress = False
else:
insuf_progress = True
else:
insuf_progress = False
# Evaluate new point
f_new, g_new = obj_func(x, t, d)
ls_func_evals += 1
gtd_new = g_new.dot(d)
ls_iter += 1
if f_new > (f + c1 * t * gtd) or f_new >= bracket_f[low_pos]:
# Armijo condition not satisfied or not lower than lowest point
bracket[high_pos] = t
bracket_f[high_pos] = f_new
bracket_g[high_pos] = g_new.clone(memory_format=torch.contiguous_format)
bracket_gtd[high_pos] = gtd_new
low_pos, high_pos = (0, 1) if bracket_f[0] <= bracket_f[1] else (1, 0)
else:
if abs(gtd_new) <= -c2 * gtd:
# Wolfe conditions satisfied
done = True
elif gtd_new * (bracket[high_pos] - bracket[low_pos]) >= 0:
# old high becomes new low
bracket[high_pos] = bracket[low_pos]
bracket_f[high_pos] = bracket_f[low_pos]
bracket_g[high_pos] = bracket_g[low_pos]
bracket_gtd[high_pos] = bracket_gtd[low_pos]
# new point becomes new low
bracket[low_pos] = t
bracket_f[low_pos] = f_new
bracket_g[low_pos] = g_new.clone(memory_format=torch.contiguous_format)
bracket_gtd[low_pos] = gtd_new
# return stuff
t = bracket[low_pos]
f_new = bracket_f[low_pos]
g_new = bracket_g[low_pos]
return f_new, g_new, t, ls_func_evals
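# The two optimizers below adapt a standard PyTorch-style LBFGS loop to OWL-QN
# (orthant-wise limited-memory quasi-Newton). The L1 penalty is kept out of the
# differentiable loss: _gather_flat_grad builds a pseudo-gradient that adds/subtracts the
# lasso strength depending on the sign of each parameter (and picks the correct
# subgradient at exactly zero), and _add_grad projects any coordinate whose sign would
# flip back to zero, so parameters can become and stay exactly sparse.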
class OWLQN0(Optimizer):
"""Implements L-BFGS algorithm, heavily inspired by `minFunc
<https://www.cs.ubc.ca/~schmidtm/Software/minFunc.html>`.
.. warning::
This optimizer doesn't support per-parameter options and parameter
groups (there can be only one).
.. warning::
Right now all parameters have to be on a single device. This will be
improved in the future.
.. note::
This is a very memory intensive optimizer (it requires additional
``param_bytes * (history_size + 1)`` bytes). If it doesn't fit in memory
try reducing the history size, or use a different algorithm.
Arguments:
lr (float): learning rate (default: 1)
lasso (float): lasso (L1 regularization) strength (default: 1.)
(L2 regularization is differentiable and thus can be in the loss)
max_iter (int): maximal number of iterations per optimization step
(default: 20)
max_eval (int): maximal number of function evaluations per optimization
step (default: max_iter * 1.25).
tolerance_grad (float): termination tolerance on first order optimality
(default: 1e-7).
tolerance_change (float): termination tolerance on function
value/parameter changes (default: 1e-9).
history_size (int): update history size (default: 100).
line_search_fn (str): either 'strong_wolfe' or None (default: None).
"""
def __init__(self,
params,
lr=1,
lasso=1.,
max_iter=20,
max_eval=None,
tolerance_grad=1e-7,
tolerance_change=1e-9,
history_size=100,
line_search_fn=None,
print_callback=print):
if max_eval is None:
max_eval = max_iter * 5 // 4
defaults = dict(
lr=lr,
lasso=lasso,
max_iter=max_iter,
max_eval=max_eval,
tolerance_grad=tolerance_grad,
tolerance_change=tolerance_change,
history_size=history_size,
line_search_fn=line_search_fn)
super(OWLQN0, self).__init__(params, defaults)
if len(self.param_groups) != 1:
raise ValueError("OWLQN doesn't support per-parameter options "
"(parameter groups)")
self._params = self.param_groups[0]['params']
self._numel_cache = None
def _numel(self):
if self._numel_cache is None:
self._numel_cache = reduce(lambda total, p: total + p.numel(), self._params, 0)
return self._numel_cache
def _gather_flat_grad(self):
lasso = self.param_groups[0]['lasso']
# SL: with pseudo gradient
views = []
for p in self._params:
if p.grad is None:
view = p.new(p.numel()).zero_()
elif p.grad.is_sparse:
view = p.grad.to_dense().view(-1)
else:
view = p.grad.view(-1)
# SL: find pseudo-gradient
# SL: at point 0
view = view.clone()
border_case = (p == 0.).reshape(-1)
go_left = (view > lasso)
go_right = (view < -lasso)
stay = ~(go_left | go_right)
view[go_left & border_case] -= lasso # go left, but be slower (gradient - lasso > 0)
view[go_right & border_case] += lasso # go right, but also be slower (gradient + lasso < 0)
view[stay & border_case] = 0.
# SL: at non-zero points
view[(p > 0.).reshape(-1)] += lasso
view[(p < 0.).reshape(-1)] -= lasso
views.append(view)
return torch.cat(views, 0)
def _add_grad(self, step_size, update):
# SL: WITH PROJECTION
offset = 0
for p in self._params:
numel = p.numel()
# view as to avoid deprecated pointwise semantics
sign0 = torch.sign(p) # SL: get sign before update
p.add_(update[offset:offset + numel].view_as(p), alpha=step_size)
p[sign0 != torch.sign(p)] = 0. # SL: project to 0 if sign changed
offset += numel
assert offset == self._numel()
def _clone_param(self):
return [p.clone(memory_format=torch.contiguous_format) for p in self._params]
def _set_param(self, params_data):
for p, pdata in zip(self._params, params_data):
p.copy_(pdata)
def _directional_evaluate(self, closure, x, t, d):
self._add_grad(t, d)
# SL
l1 = 0.
for p in self._params:
l1 = l1 + torch.norm(p, 1)
l1 = l1 * self.param_groups[0]['lasso']
loss = float(closure()) + l1
flat_grad = self._gather_flat_grad()
self._set_param(x)
return loss, flat_grad
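# _directional_evaluate above takes a trial step x + t*d, evaluates the closure loss plus
# the L1 term and the pseudo-gradient at that point, then restores the original
# parameters; it is the objective handed to _strong_wolfe during the optional line search.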
@torch.no_grad()
def step(self, closure):
"""Performs a single optimization step.
Arguments:
closure (callable): A closure that reevaluates the model
and returns the loss.
"""
assert len(self.param_groups) == 1
# Make sure the closure is always called with grad enabled
closure = torch.enable_grad()(closure)
group = self.param_groups[0]
lr = group['lr']
max_iter = group['max_iter']
max_eval = group['max_eval']
tolerance_grad = group['tolerance_grad']
tolerance_change = group['tolerance_change']
line_search_fn = group['line_search_fn']
history_size = group['history_size']
# NOTE: LBFGS has only global state, but we register it as state for
# the first param, because this helps with casting in load_state_dict
state = self.state[self._params[0]]
state.setdefault('func_evals', 0)
state.setdefault('n_iter', 0)
# evaluate initial f(x) and df/dx
l1 = 0. # SL: Add the L1 regularization
for p in self._params: # SL: ..
l1 = l1 + torch.norm(p, 1) # SL: ..
l1 = l1 * self.param_groups[0]['lasso'] # SL: ..
orig_loss = closure() + l1 # SL: ..
loss = float(orig_loss)
current_evals = 1
state['func_evals'] += 1
flat_grad = self._gather_flat_grad()
opt_cond = flat_grad.abs().max() <= tolerance_grad
# optimal condition
if opt_cond:
return orig_loss
# tensors cached in state (for tracing)
d = state.get('d')
t = state.get('t')
old_dirs = state.get('old_dirs')
old_stps = state.get('old_stps')
ro = state.get('ro')
H_diag = state.get('H_diag')
prev_flat_grad = state.get('prev_flat_grad')
prev_loss = state.get('prev_loss')
n_iter = 0
# optimize for a max of max_iter iterations
while n_iter < max_iter:
# keep track of nb of iterations
n_iter += 1
state['n_iter'] += 1
############################################################
# compute gradient descent direction
############################################################
if state['n_iter'] == 1:
d = flat_grad.neg()
old_dirs = []
old_stps = []
ro = []
H_diag = 1
else:
# do lbfgs update (update memory)
y = flat_grad.sub(prev_flat_grad)
s = d.mul(t)
ys = y.dot(s) # y*s
if ys > 1e-10:
# updating memory
if len(old_dirs) == history_size:
# shift history by one (limited-memory)
old_dirs.pop(0)
old_stps.pop(0)
ro.pop(0)
# store new direction/step
old_dirs.append(y)
old_stps.append(s)
ro.append(1. / ys)
# update scale of initial Hessian approximation
H_diag = ys / y.dot(y) # (y*y)
# compute the approximate (L-BFGS) inverse Hessian
# multiplied by the gradient
num_old = len(old_dirs)
if 'al' not in state:
state['al'] = [None] * history_size
al = state['al']
# iteration in L-BFGS loop collapsed to use just one buffer
q = flat_grad.neg()
for i in range(num_old - 1, -1, -1):
al[i] = old_stps[i].dot(q) * ro[i]
q.add_(old_dirs[i], alpha=-al[i])
# multiply by initial Hessian
# r/d is the final direction
d = r = torch.mul(q, H_diag)
for i in range(num_old):
be_i = old_dirs[i].dot(r) * ro[i]
r.add_(old_stps[i], alpha=al[i] - be_i)
if prev_flat_grad is None:
prev_flat_grad = flat_grad.clone(memory_format=torch.contiguous_format)
else:
prev_flat_grad.copy_(flat_grad)
prev_loss = loss
############################################################
# compute step length
############################################################
# reset initial guess for step size
if state['n_iter'] == 1:
t = min(1., 1. / flat_grad.abs().sum()) * lr
else:
t = lr
# directional derivative
gtd = flat_grad.dot(d) # g * d
# directional derivative is below tolerance
if gtd > -tolerance_change:
break
# optional line search: user function
ls_func_evals = 0
if line_search_fn is not None:
# perform line search, using user function
if line_search_fn != "strong_wolfe":
raise RuntimeError("only 'strong_wolfe' is supported")
else:
x_init = self._clone_param()
def obj_func(x, t, d):
return self._directional_evaluate(closure, x, t, d)
loss, flat_grad, t, ls_func_evals = _strong_wolfe(
obj_func, x_init, t, d, loss, flat_grad, gtd)
self._add_grad(t, d)
opt_cond = flat_grad.abs().max() <= tolerance_grad
else:
# no line search, simply move with fixed-step
self._add_grad(t, d)
if n_iter != max_iter:
# re-evaluate function only if not in last iteration
# the reason we do this: in a stochastic setting,
# no use to re-evaluate that function here
l1 = 0. # SL: ..
for p in self._params: # SL:
l1 = l1 + torch.norm(p, 1) # SL:
l1 = l1 * self.param_groups[0]['lasso'] # SL:
with torch.enable_grad():
loss = float(closure())
loss += l1 # SL:
flat_grad = self._gather_flat_grad()
opt_cond = flat_grad.abs().max() <= tolerance_grad
ls_func_evals = 1
# update func eval
current_evals += ls_func_evals
state['func_evals'] += ls_func_evals
############################################################
# check conditions
############################################################
if n_iter == max_iter:
break
if current_evals >= max_eval:
break
# optimal condition
if opt_cond:
break
# lack of progress
if d.mul(t).abs().max() <= tolerance_change:
break
if abs(loss - prev_loss) < tolerance_change:
break
state['d'] = d
state['t'] = t
state['old_dirs'] = old_dirs
state['old_stps'] = old_stps
state['ro'] = ro
state['H_diag'] = H_diag
state['prev_flat_grad'] = prev_flat_grad
state['prev_loss'] = prev_loss
return orig_loss
class OWLQN0_masked(Optimizer):
"""Implements L-BFGS algorithm, heavily inspired by `minFunc
<https://www.cs.ubc.ca/~schmidtm/Software/minFunc.html>`.
.. warning::
This optimizer doesn't support per-parameter options and parameter
groups (there can be only one).
.. warning::
Right now all parameters have to be on a single device. This will be
improved in the future.
.. note::
This is a very memory intensive optimizer (it requires additional
``param_bytes * (history_size + 1)`` bytes). If it doesn't fit in memory
try reducing the history size, or use a different algorithm.
Arguments:
lr (float): learning rate (default: 1)
lasso (tensor): per-parameter lasso (L1 regularization) strengths, aligned with the
flattened parameters (default: None)
(L2 regularization is differentiable and thus can be in the loss)
mask (bool tensor): selects the entries that receive the zero-point pseudo-gradient
treatment and the sign-change projection (default: None)
max_iter (int): maximal number of iterations per optimization step
(default: 20)
max_eval (int): maximal number of function evaluations per optimization
step (default: max_iter * 1.25).
tolerance_grad (float): termination tolerance on first order optimality
(default: 1e-7).
tolerance_change (float): termination tolerance on function
value/parameter changes (default: 1e-9).
history_size (int): update history size (default: 100).
line_search_fn (str): either 'strong_wolfe' or None (default: None).
"""
def __init__(self,
params,
lr=1,
lasso=None,
mask=None,
max_iter=20,
max_eval=None,
tolerance_grad=1e-7,
tolerance_change=1e-9,
history_size=100,
line_search_fn=None,
print_callback=print):
if max_eval is None:
max_eval = max_iter * 5 // 4
defaults = dict(
lr=lr,
lasso=lasso,
mask=mask,
max_iter=max_iter,
max_eval=max_eval,
tolerance_grad=tolerance_grad,
tolerance_change=tolerance_change,
history_size=history_size,
line_search_fn=line_search_fn)
super(OWLQN0_masked, self).__init__(params, defaults)
if len(self.param_groups) != 1:
raise ValueError("OWLQN doesn't support per-parameter options "
"(parameter groups)")
self._params = self.param_groups[0]['params']
self._numel_cache = None
def _numel(self):
if self._numel_cache is None:
self._numel_cache = reduce(lambda total, p: total + p.numel(), self._params, 0)
return self._numel_cache
def _gather_flat_grad(self):
lasso = self.param_groups[0]['lasso']
mask = self.param_groups[0]['mask']
# SL: with pseudo gradient
views = []
for p in self._params:
if p.grad is None:
view = p.new(p.numel()).zero_()
elif p.grad.is_sparse:
view = p.grad.to_dense().view(-1)
else:
view = p.grad.view(-1)
# only apply to masked ones
# SL: find pseudo-gradient
# SL: at point 0
view = view.clone()
border_case = (p == 0.).reshape(-1)
go_left = (view > lasso)
go_right = (view < -lasso)
stay = ~(go_left | go_right)
view[go_left & border_case & mask] -= lasso[go_left & border_case & mask] # go left, but be slower (gradient - lasso > 0)
view[go_right & border_case & mask] += lasso[go_right & border_case & mask] # go right, but also be slower (gradient + lasso < 0)
view[stay & border_case & mask] = 0.
# SL: at non-zero points
gt0 = (p > 0.).reshape(-1)
lt0 = (p < 0.).reshape(-1)
view[gt0] += lasso[gt0]
view[lt0] -= lasso[lt0]
views.append(view)
return torch.cat(views, 0)
def _add_grad(self, step_size, update):
# SL: WITH PROJECTION
mask = self.param_groups[0]['mask']
offset = 0
for p in self._params:
numel = p.numel()
# view as to avoid deprecated pointwise semantics
sign0 = torch.sign(p) # SL: get sign before update
p.add_(update[offset:offset + numel].view_as(p), alpha=step_size)
p[(sign0 != torch.sign(p)) & mask] = 0. # SL: project to 0 if sign changed
offset += numel
assert offset == self._numel()
def _clone_param(self):
return [p.clone(memory_format=torch.contiguous_format) for p in self._params]
def _set_param(self, params_data):
for p, pdata in zip(self._params, params_data):
p.copy_(pdata)
def _directional_evaluate(self, closure, x, t, d):
self._add_grad(t, d)
# SL
l1 = 0.
for p in self._params:
l1 = l1 + torch.norm(p * self.param_groups[0]['lasso'], 1)
# l1 = l1 * self.param_groups[0]['lasso']
loss = float(closure()) + l1
flat_grad = self._gather_flat_grad()
self._set_param(x)
return loss, flat_grad
@torch.no_grad()
def step(self, closure):
"""Performs a single optimization step.
Arguments:
closure (callable): A closure that reevaluates the model
and returns the loss.
"""
assert len(self.param_groups) == 1
# Make sure the closure is always called with grad enabled
closure = torch.enable_grad()(closure)
group = self.param_groups[0]
lr = group['lr']
max_iter = group['max_iter']
max_eval = group['max_eval']
tolerance_grad = group['tolerance_grad']
tolerance_change = group['tolerance_change']
line_search_fn = group['line_search_fn']
history_size = group['history_size']
# NOTE: LBFGS has only global state, but we register it as state for
# the first param, because this helps with casting in load_state_dict
state = self.state[self._params[0]]
state.setdefault('func_evals', 0)
state.setdefault('n_iter', 0)
# evaluate initial f(x) and df/dx
l1 = 0. # SL: Add the L1 regularization
for p in self._params: # SL: ..
l1 = l1 + torch.norm(p * self.param_groups[0]['lasso'], 1) # SL: ..
orig_loss = closure() + l1 # SL: ..
loss = float(orig_loss)
current_evals = 1
state['func_evals'] += 1
flat_grad = self._gather_flat_grad()
opt_cond = flat_grad.abs().max() <= tolerance_grad
# optimal condition
if opt_cond:
return orig_loss
# tensors cached in state (for tracing)
d = state.get('d')
t = state.get('t')
old_dirs = state.get('old_dirs')
old_stps = state.get('old_stps')
ro = state.get('ro')
H_diag = state.get('H_diag')
prev_flat_grad = state.get('prev_flat_grad')
prev_loss = state.get('prev_loss')
n_iter = 0
# optimize for a max of max_iter iterations
while n_iter < max_iter:
# keep track of nb of iterations
n_iter += 1
state['n_iter'] += 1
############################################################
# compute gradient descent direction
############################################################
if state['n_iter'] == 1:
d = flat_grad.neg()
old_dirs = []
old_stps = []
ro = []
H_diag = 1
else:
# do lbfgs update (update memory)
y = flat_grad.sub(prev_flat_grad)
s = d.mul(t)
ys = y.dot(s) # y*s
if ys > 1e-10:
# updating memory
if len(old_dirs) == history_size:
# shift history by one (limited-memory)
old_dirs.pop(0)
old_stps.pop(0)
ro.pop(0)
# store new direction/step
old_dirs.append(y)
old_stps.append(s)
ro.append(1. / ys)
# update scale of initial Hessian approximation
H_diag = ys / y.dot(y) # (y*y)
# compute the approximate (L-BFGS) inverse Hessian
# multiplied by the gradient
num_old = len(old_dirs)
if 'al' not in state:
state['al'] = [None] * history_size
al = state['al']
# iteration in L-BFGS loop collapsed to use just one buffer
q = flat_grad.neg()
for i in range(num_old - 1, -1, -1):
al[i] = old_stps[i].dot(q) * ro[i]
q.add_(old_dirs[i], alpha=-al[i])
# multiply by initial Hessian
# r/d is the final direction
d = r = torch.mul(q, H_diag)
for i in range(num_old):
be_i = old_dirs[i].dot(r) * ro[i]
r.add_(old_stps[i], alpha=al[i] - be_i)
if prev_flat_grad is None:
prev_flat_grad = flat_grad.clone(memory_format=torch.contiguous_format)
else:
prev_flat_grad.copy_(flat_grad)
prev_loss = loss
############################################################
# compute step length
############################################################
# reset initial guess for step size
if state['n_iter'] == 1:
t = min(1., 1. / flat_grad.abs().sum()) * lr
else:
t = lr
# directional derivative
gtd = flat_grad.dot(d) # g * d
# directional derivative is below tolerance
if gtd > -tolerance_change:
break
# optional line search: user function
ls_func_evals = 0
if line_search_fn is not None:
# perform line search, using user function
if line_search_fn != "strong_wolfe":
raise RuntimeError("only 'strong_wolfe' is supported")
else:
x_init = self._clone_param()
def obj_func(x, t, d):
return self._directional_evaluate(closure, x, t, d)
loss, flat_grad, t, ls_func_evals = _strong_wolfe(
obj_func, x_init, t, d, loss, flat_grad, gtd)
self._add_grad(t, d)
opt_cond = flat_grad.abs().max() <= tolerance_grad
else:
# no line search, simply move with fixed-step
self._add_grad(t, d)
if n_iter != max_iter:
# re-evaluate function only if not in last iteration
# the reason we do this: in a stochastic setting,
# there is no point re-evaluating the function here
l1 = 0. # SL: ..
for p in self._params: # SL:
l1 = l1 + torch.norm(p * self.param_groups[0]['lasso'], 1) # SL:
#l1 = l1 * self.param_groups[0]['lasso'] # SL:
with torch.enable_grad():
loss = float(closure())
loss += float(l1) # SL:
flat_grad = self._gather_flat_grad()
opt_cond = flat_grad.abs().max() <= tolerance_grad
ls_func_evals = 1
# update func eval
current_evals += ls_func_evals
state['func_evals'] += ls_func_evals
############################################################
# check conditions
############################################################
if n_iter == max_iter:
break
if current_evals >= max_eval:
break
# optimal condition
if opt_cond:
break
# lack of progress
if d.mul(t).abs().max() <= tolerance_change:
break
if abs(loss - prev_loss) < tolerance_change:
break
state['d'] = d
state['t'] = t
state['old_dirs'] = old_dirs
state['old_stps'] = old_stps
state['ro'] = ro
state['H_diag'] = H_diag
state['prev_flat_grad'] = prev_flat_grad
state['prev_loss'] = prev_loss
return orig_loss
def OWLQN(params,
lr=1,
lasso: Union[float, int, np.ndarray, list] = 1.,
max_iter=20,
max_eval=None,
tolerance_grad=1e-7,
tolerance_change=1e-9,
history_size=100,
line_search_fn=None,
print_callback=print,
use_gpu=False):
"""
Dispatching to proper OWLQN class depending on the structure of lasso
:param params:
:param lr:
:param lasso:
:param max_iter:
:param max_eval:
:param tolerance_grad:
:param tolerance_change:
:param history_size:
:param line_search_fn:
:param print_callback:
:return:
"""
if isinstance(lasso, (float, int)):
return OWLQN0(params, lr, float(lasso),
max_iter, max_eval, tolerance_grad, tolerance_change, history_size, line_search_fn,
print_callback)
if isinstance(lasso, (list, np.ndarray)):
lasso = torch.tensor(lasso)
mask = (lasso != 0.) # SL: only parameters with a non-zero lasso weight get the projection mask
if use_gpu:
lasso = lasso.cuda()
mask = mask.cuda()
return OWLQN0_masked(params, lr, lasso, mask,
max_iter, max_eval, tolerance_grad, tolerance_change, history_size, line_search_fn,
print_callback)
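# Usage sketch (added for illustration, not part of the upstream file): a toy
# L1-penalized least-squares fit. The model and data are placeholders, and the
# closure protocol mirrors torch.optim.LBFGS, from which OWLQN0 is adapted.
if __name__ == "__main__":
    import torch.nn as nn
    X, y = torch.randn(64, 10), torch.randn(64, 1)
    model = nn.Linear(10, 1)
    optimizer = OWLQN(model.parameters(), lr=1., lasso=1e-2,
                      max_iter=50, line_search_fn="strong_wolfe")
    def closure():
        optimizer.zero_grad()
        loss = nn.functional.mse_loss(model(X), y)
        loss.backward()
        return loss
    optimizer.step(closure)
    # count how many weights the OWL-QN projection drove exactly to zero
    zeroed = sum(int((p == 0.).sum()) for p in model.parameters())
    print("zeroed parameters after OWL-QN step:", zeroed)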
| 36.951002
| 142
| 0.514285
| 4,068
| 33,182
| 3.997542
| 0.10767
| 0.027549
| 0.014758
| 0.015742
| 0.820563
| 0.812016
| 0.792953
| 0.792953
| 0.787419
| 0.779916
| 0
| 0.017791
| 0.369869
| 33,182
| 897
| 143
| 36.992196
| 0.75996
| 0.241848
| 0
| 0.786596
| 0
| 0
| 0.030758
| 0
| 0
| 0
| 0
| 0
| 0.007055
| 1
| 0.037037
| false
| 0
| 0.008818
| 0.007055
| 0.082892
| 0.008818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1670fb8697e7e4f8f6138107ed519fe058aa9428
| 105
|
py
|
Python
|
apraw/models/__init__.py
|
DankDumpster/APRAW
|
4d301a4dc400edfb9e4562600062970ac4e7eb25
|
[
"MIT"
] | 2
|
2020-04-21T23:44:19.000Z
|
2020-04-22T00:24:36.000Z
|
apraw/models/__init__.py
|
DankDumpster/APRAW
|
4d301a4dc400edfb9e4562600062970ac4e7eb25
|
[
"MIT"
] | null | null | null |
apraw/models/__init__.py
|
DankDumpster/APRAW
|
4d301a4dc400edfb9e4562600062970ac4e7eb25
|
[
"MIT"
] | 1
|
2020-04-21T19:10:37.000Z
|
2020-04-21T19:10:37.000Z
|
from .redditor import Redditor
from .subreddits import Subreddits
from .reddit.redditor import Redditor
| 35
| 37
| 0.838095
| 13
| 105
| 6.769231
| 0.384615
| 0.318182
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12381
| 105
| 3
| 37
| 35
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
168712f3667cd14e28b7cf1eb726dc1aae9a6767
| 2,991
|
py
|
Python
|
project/tests/scripts/big_tests.py
|
LeDron12/c2eo
|
4f0dc6ed79df0739bd834eda6a0f77f3caf4292c
|
[
"MIT"
] | 12
|
2021-08-05T12:12:09.000Z
|
2022-03-08T13:33:53.000Z
|
project/tests/scripts/big_tests.py
|
LeDron12/c2eo
|
4f0dc6ed79df0739bd834eda6a0f77f3caf4292c
|
[
"MIT"
] | 26
|
2021-08-23T10:25:37.000Z
|
2022-03-30T12:56:08.000Z
|
project/tests/scripts/big_tests.py
|
LeDron12/c2eo
|
4f0dc6ed79df0739bd834eda6a0f77f3caf4292c
|
[
"MIT"
] | 12
|
2021-08-17T09:20:07.000Z
|
2022-03-31T13:37:28.000Z
|
from utests import unittest
from system_vars import *
import fun
class BigTests(unittest.TestCase):
def test_global_integer_vars(self):
fun.showname()
c_types = []
values = []
for _type in integer:
c_types += [_type]*7
values += [_type[-1], _type[-2], _type[-1] - 18, _type[-2] + 18,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 1,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 2,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 3]
declaration, names = fun.generate_vars(c_types, values)
fun.generate1(c_types, declaration, names)
fun.generate2(c_types, declaration, names)
res, msg = fun.compile_run()
self.assertTrue(res, msg=msg)
res, msg = fun.compare()
self.assertTrue(res, msg=msg)
def test_static_integer_vars(self):
fun.showname()
c_types = []
values = []
for _type in integer:
c_types += [_type]*7
values += [_type[-1], _type[-2], _type[-1] - 18, _type[-2] + 18,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 1,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 2,
_type[-1] + (_type[-2] + 1 - _type[-1]) // 4 * 3]
static = [True]*len(c_types)
declaration, names = fun.generate_vars(c_types, values, static)
fun.generate1(c_types, declaration, names)
fun.generate2(c_types, declaration, names)
res, msg = fun.compile_run()
self.assertTrue(res, msg=msg)
res, msg = fun.compare()
self.assertTrue(res, msg=msg)
def test_global_real_vars(self):
fun.showname()
c_types = []
values = []
for _type in real:
c_types += [_type]*7
values += [_type[-1], _type[-2], f'{_type[-1]} * 2', f'{_type[-2]} * 2',
_type[-2] // 2, _type[-1] // 2, 1 / 9]
declaration, names = fun.generate_vars(c_types, values)
fun.generate1(c_types, declaration, names)
fun.generate2(c_types, declaration, names)
res, msg = fun.compile_run()
self.assertTrue(res, msg=msg)
res, msg = fun.compare()
self.assertTrue(res, msg=msg)
def test_static_real_vars(self):
fun.showname()
c_types = []
values = []
for _type in real:
c_types += [_type]*7
values += [_type[-1], _type[-2], f'{_type[-1]} * 2', f'{_type[-2]} * 2',
_type[-2] // 2, _type[-1] // 2, 1 / 9]
static = [True]*len(c_types)
declaration, names = fun.generate_vars(c_types, values, static)
fun.generate1(c_types, declaration, names)
fun.generate2(c_types, declaration, names)
res, msg = fun.compile_run()
self.assertTrue(res, msg=msg)
res, msg = fun.compare()
self.assertTrue(res, msg=msg)
| 38.346154
| 86
| 0.51655
| 376
| 2,991
| 3.87234
| 0.119681
| 0.090659
| 0.061813
| 0.068681
| 0.930632
| 0.930632
| 0.930632
| 0.930632
| 0.930632
| 0.930632
| 0
| 0.043651
| 0.325978
| 2,991
| 77
| 87
| 38.844156
| 0.678571
| 0
| 0
| 0.885714
| 0
| 0
| 0.02006
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.057143
| false
| 0
| 0.042857
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16a67e9ba32dad890b0ed6a6df134ccabb5e31ca
| 1,517
|
py
|
Python
|
slavdict/dictionary/migrations/0038_auto_20180928_1556.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | 1
|
2022-01-17T17:26:25.000Z
|
2022-01-17T17:26:25.000Z
|
slavdict/dictionary/migrations/0038_auto_20180928_1556.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | 8
|
2020-02-12T13:26:05.000Z
|
2022-02-10T19:28:07.000Z
|
slavdict/dictionary/migrations/0038_auto_20180928_1556.py
|
slavdict/slavdict
|
893886b80de413cb2bb6c0af5adba9c55aa6a7af
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-28 15:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dictionary', '0037_auto_20180901_0953'),
]
operations = [
migrations.AlterField(
model_name='meaning',
name='special_case',
field=models.CharField(blank=True, choices=[(b'', b''), ('\u0418\u043c\u0435\u043d\u0430', ((b'a', '\u043a\u0430\u043d\u043e\u043d\u0438\u0447.'), (b'h', '\u0438\u043c\u044f \u0441\u043e\u0431\u0441\u0442\u0432.'), (b'i', '\u0442\u043e\u043f\u043e\u043d\u0438\u043c'))), ('\u0427\u0430\u0441\u0442\u0438 \u0440\u0435\u0447\u0438', ((b'f', '\u043d\u0430\u0440\u0435\u0447.'), (b'm', '\u0441\u043e\u044e\u0437'), (b'b', '\u043f\u0440\u0435\u0434\u043b.'), (b'c', '\u0447\u0430\u0441\u0442.'), (b'g', '\u043c\u0435\u0436\u0434.'))), ('\u0424\u043e\u0440\u043c\u044b \u0441\u043b\u043e\u0432\u0430', ((b'd', '\u0434\u0430\u0442.'), (b'k', '\u043c\u043d.'), (b'e', '\u0442\u0432\u043e\u0440. \u0435\u0434. \u0432 \u0440\u043e\u043b\u0438 \u043d\u0430\u0440\u0435\u0447.'), (b'l', '\u0432 \u0440\u043e\u043b\u0438 \u043d\u0430\u0440\u0435\u0447.'), (b'n', '\u0432 \u0440\u043e\u043b\u0438 \u043f\u0440\u0438\u043b.'))), ('\u0414\u0440\u0443\u0433\u043e\u0435', ((b'j', '\u043f\u0440\u0435\u0438\u043c\u0443\u0449.'),))], default=b'', max_length=1, verbose_name='\u043e\u0441\u043e\u0431\u044b\u0435 \u0441\u043b\u0443\u0447\u0430\u0438'),
),
]
| 72.238095
| 1,144
| 0.651285
| 217
| 1,517
| 4.520737
| 0.391705
| 0.071356
| 0.061162
| 0.061162
| 0.155963
| 0.130479
| 0.103976
| 0.103976
| 0.103976
| 0.103976
| 0
| 0.389341
| 0.109427
| 1,517
| 20
| 1,145
| 75.85
| 0.336788
| 0.045485
| 0
| 0
| 1
| 0.25
| 0.620069
| 0.525952
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16b0958e539fa1021c1417fa09f683ffae405da8
| 10,170
|
py
|
Python
|
ethereum/accounts.py
|
sudohostname/ETH
|
abb3e73e3bb895308f16ae26cdaf4c326fee6eef
|
[
"MIT"
] | null | null | null |
ethereum/accounts.py
|
sudohostname/ETH
|
abb3e73e3bb895308f16ae26cdaf4c326fee6eef
|
[
"MIT"
] | null | null | null |
ethereum/accounts.py
|
sudohostname/ETH
|
abb3e73e3bb895308f16ae26cdaf4c326fee6eef
|
[
"MIT"
] | null | null | null |
import requests as req
# accountslist = ["0x00000000219ab540356cbb839cbe05303d7705fa", "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
# "0xda9dfa130df4de4673b89022ee50ff26f6ea73cf","0x73bceb1cd57c711feac4224d062b0f6ff338501e","0xbe0eb53f46cd790cd13851d5eff43d12404d33e8","0x9bf4001d307dfd62b26a2f1307ee0c0307632d59","0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5","0x61edcdf5bb737adffe5043706e7c5bb1f1a56eea","0xdc24316b9ae028f1497c275eb9192a3ea0f67022","0x1b3cb81e51011b549d78bf720b0d924ac763a7c2","0x07ee55aa48bb72dcc6e9d78256648910de513eca","0x8484ef722627bf18ca5ae6bcf031c23e6e922b30","0x011b6e24ffb0b5f5fcc564cf4183c5bbbc96d515","0xc61b9bb3a7a0767e3179713f3a5c7a9aedce193c","0xe92d1a43df510f82c66382592a047d288f85226f","0xf977814e90da44bfa03b6295a0616a897441acec","0x742d35cc6634c0532925a3b844bc454e4438f44e","0xdf9eb223bafbe5c5271415c75aecd68c21fe3d7f","0x6262998ced04146fa42253a5c0af90ca02dfd2a3","0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae","0xa929022c9107643515f5c777ce9a910f0d1e490c","0x220866b1a2219f40e72f5c628b65d54268ca3a9d","0xca8fa8f0b631ecdb18cda619c4fc9d197c8affca","0x176f3dab24a159341c0509bb36b833e7fdd0a132","0x3bfc20f0b9afcace800d73d2191166ff16540258","0x8103683202aa8da10536036edef04cdd865c225e","0xa7efae728d2936e78bda97dc267687568dd593f3","0x0a4c79ce84202b03e95b7a692e5d728d83c44c76","0x7d24796f7ddb17d73e8b1d0a3bbd103fba2cb2fe","0x2b6ed29a95753c3ad948348e3e7b1a251080ffb9","0xbddf00563c9abd25b576017f08c46982012f12be","0x189b9cbd4aff470af2c0102f365fc1823d857965","0x9845e1909dca337944a0272f1f9f7249833d2d19","0x0548f59fee79f8832c299e01dca5c76f034f558e","0x59448fe20378357f206880c58068f095ae63d5a5","0x0c23fc0ef06716d2f8ba19bc4bed56d045581f2d","0x2faf487a4414fe77e2327f0bf4ae2a264a776ad2","0x66f820a414680b5bcda5eeca5dea238543f42054","0x558553d54183a8542f7832742e7b4ba9c33aa1e6","0x98ec059dc3adfbdd63429454aeb0c990fba4a128", "0xb29380ffc20696729b7ab8d093fa1e2ec14dfe2b", "0xcdbf58a9a9b54a2c43800c50c7192946de858321", "0x19184ab45c40c2920b0e0e31413b9434abd243ed","0x90a9e09501b70570f9b11df2a6d4f047f8630d6d","0xbf3aeb96e164ae67e763d9e050ff124e7c3fdd28","0xf774da4418c6dca3051f0e7570829b24214e730b","0xb8808f9e9b88497ec522304055cd537a0913f6a0","0x1db92e2eebc8e0c075a02bea49a2935bcd2dfcf4","0xdc1487e092caba080c6badafaa75a58ce7a2ec34","0x36a85757645e8e8aec062a1dee289c7d615901ca","0xd69b0089d9ca950640f5dc9931a41a5965f00303","0xa7e4fecddc20d83f36971b67e13f1abc98dfcfa6","0x7da82c7ab4771ff031b66538d2fb9b0b047f6cf9","0x5b5b69f4e0add2df5d2176d7dbd20b4897bc7ec4","0x78605df79524164911c144801f41e9811b7db73d","0x3ba25081d3935fcc6788e6220abcace39d58d95d","0xef22c14f46858d5ac61326497b056974167f2ee1","0xfd898a0f677e97a9031654fc79a27cb5e31da34a","0x701c484bfb40ac628afa487b6082f084b14af0bd","0x4b4a011c420b91260a272afd91e54accdafdfc1d", "0xa8dcc0373822b94d7f57326be24ca67bafcaad6b","0x367989c660881e1ca693730f7126fe0ffc0963fb","0x0ff64c53d295533a37f913bb78be9e2adc78f5fe","0x844ada2ed8ecd77a1a9c72912df0fcb8b8c495a7", "0x9c2fc4fc75fa2d7eb5ba9147fa7430756654faa9", "0xb20411c403687d1036e05c8a7310a0f218429503","0x9a1ed80ebc9936cee2d3db944ee6bd8d407e7f9f","0xb8cda067fabedd1bb6c11c626862d7255a2414fe","0xb9fa6e54025b4f0829d8e1b42e8b846914659632","0xba18ded5e0d604a86428282964ae0bb249ceb9d0","0xfe01a216234f79cfc3bea7513e457c6a9e50250d","0x0c05ec4db907cfb91b2a1a29e7b86688b7568a6d","0xc4cf565a5d25ee2803c9b8e91fc3d7c62e79fe69","0xe04cf52e9fafa3d9bf14c407afff94165ef835f7","0x77afe94859163abf0b90725d69e904ea91446c7b","0x19d599012788b991ff542f31208bab21ea38403e","0xca582d9655a50e6512045740deb0de3a7ee5281f","0xd05e6bf1a00b5b4c9df909309f19e29af792422b","0x0f00294c6e4c30d9ffc0557fec6c586e6f8c3935","0xeb2b00042ce4522ce2d1aacee6f312d26c4eb9d6", 
"0x7ae92148e79d60a0749fd6de374c8e81dfddf792","0x554f4476825293d4ad20e02b54aca13956acc40a","0x9cf36e93a8e2b1eaaa779d9965f46c90b820048c","0x4756eeebf378046f8dd3cb6fa908d93bfd45f139","0x091933ee1088cdf5daace8baec0997a4e93f0dd6","0xa0efb63be0db8fc11681a598bf351a42a6ff50e0","0x8b83b9c4683aa4ec897c569010f09c6d04608163","0x550cd530bc893fc6d2b4df6bea587f17142ab64e","0x828103b231b39fffce028562412b3c04a4640e64","0xe35b0ef92452c353dbb93775e0df97cedf873c72","0x0518f5bb058f6215a0ff5f4df54dae832d734e04","0x0e86733eab26cfcc04bb1752a62ec88e910b4cf5","0xb8b6fe7f357adeab950ac6c270ce340a46989ce1","0xeddf8eb4984cc27407a568cae1c78a1ddb0c2c1b","0x7145cfedcf479bede20c0a4ba1868c93507d5786","0x2fa9f9efc767650aace0422668444c3ff63e1f8d","0xd57479b8287666b44978255f1677e412d454d4f0","0x4baf012726cb5ec7dda57bc2770798a38100c44d","0x67fde691b11e96083fc52a5b74d73f0695811a3b","0x5fc90190177ea60c0aebc0ee7157541399b46d10"]
accounts = "0x00000000219ab540356cbb839cbe05303d7705fa, 0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2, 0xda9dfa130df4de4673b89022ee50ff26f6ea73cf,0x73bceb1cd57c711feac4224d062b0f6ff338501e,0xbe0eb53f46cd790cd13851d5eff43d12404d33e8,0x9bf4001d307dfd62b26a2f1307ee0c0307632d59,0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5,0x61edcdf5bb737adffe5043706e7c5bb1f1a56eea,0xdc24316b9ae028f1497c275eb9192a3ea0f67022,0x1b3cb81e51011b549d78bf720b0d924ac763a7c2,0x07ee55aa48bb72dcc6e9d78256648910de513eca,0x8484ef722627bf18ca5ae6bcf031c23e6e922b30,0x011b6e24ffb0b5f5fcc564cf4183c5bbbc96d515,0xc61b9bb3a7a0767e3179713f3a5c7a9aedce193c,0xe92d1a43df510f82c66382592a047d288f85226f,0xf977814e90da44bfa03b6295a0616a897441acec,0x742d35cc6634c0532925a3b844bc454e4438f44e,0xdf9eb223bafbe5c5271415c75aecd68c21fe3d7f,0x6262998ced04146fa42253a5c0af90ca02dfd2a3,0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae"
accounts2 = "0xa929022c9107643515f5c777ce9a910f0d1e490c,0x220866b1a2219f40e72f5c628b65d54268ca3a9d,0xca8fa8f0b631ecdb18cda619c4fc9d197c8affca,0x176f3dab24a159341c0509bb36b833e7fdd0a132,0x3bfc20f0b9afcace800d73d2191166ff16540258,0x8103683202aa8da10536036edef04cdd865c225e,0xa7efae728d2936e78bda97dc267687568dd593f3,0x0a4c79ce84202b03e95b7a692e5d728d83c44c76,0x7d24796f7ddb17d73e8b1d0a3bbd103fba2cb2fe,0x2b6ed29a95753c3ad948348e3e7b1a251080ffb9,0xbddf00563c9abd25b576017f08c46982012f12be,0x189b9cbd4aff470af2c0102f365fc1823d857965,0x9845e1909dca337944a0272f1f9f7249833d2d19,0x0548f59fee79f8832c299e01dca5c76f034f558e,0x59448fe20378357f206880c58068f095ae63d5a5,0x0c23fc0ef06716d2f8ba19bc4bed56d045581f2d,0x2faf487a4414fe77e2327f0bf4ae2a264a776ad2,0x66f820a414680b5bcda5eeca5dea238543f42054,0x558553d54183a8542f7832742e7b4ba9c33aa1e6,0x98ec059dc3adfbdd63429454aeb0c990fba4a128"
accounts3 = "0xb29380ffc20696729b7ab8d093fa1e2ec14dfe2b, 0xcdbf58a9a9b54a2c43800c50c7192946de858321, 0x19184ab45c40c2920b0e0e31413b9434abd243ed,0x90a9e09501b70570f9b11df2a6d4f047f8630d6d,0xbf3aeb96e164ae67e763d9e050ff124e7c3fdd28,0xf774da4418c6dca3051f0e7570829b24214e730b,0xb8808f9e9b88497ec522304055cd537a0913f6a0,0x1db92e2eebc8e0c075a02bea49a2935bcd2dfcf4,0xdc1487e092caba080c6badafaa75a58ce7a2ec34,0x36a85757645e8e8aec062a1dee289c7d615901ca,0xd69b0089d9ca950640f5dc9931a41a5965f00303,0xa7e4fecddc20d83f36971b67e13f1abc98dfcfa6,0x7da82c7ab4771ff031b66538d2fb9b0b047f6cf9,0x5b5b69f4e0add2df5d2176d7dbd20b4897bc7ec4,0x78605df79524164911c144801f41e9811b7db73d,0x3ba25081d3935fcc6788e6220abcace39d58d95d,0xef22c14f46858d5ac61326497b056974167f2ee1,0xfd898a0f677e97a9031654fc79a27cb5e31da34a,0x701c484bfb40ac628afa487b6082f084b14af0bd, 0x4b4a011c420b91260a272afd91e54accdafdfc1d"
accounts4 = "0xa8dcc0373822b94d7f57326be24ca67bafcaad6b,0x367989c660881e1ca693730f7126fe0ffc0963fb,0x0ff64c53d295533a37f913bb78be9e2adc78f5fe,0x844ada2ed8ecd77a1a9c72912df0fcb8b8c495a7, 0x9c2fc4fc75fa2d7eb5ba9147fa7430756654faa9, 0xb20411c403687d1036e05c8a7310a0f218429503,0x9a1ed80ebc9936cee2d3db944ee6bd8d407e7f9f,0xb8cda067fabedd1bb6c11c626862d7255a2414fe,0xb9fa6e54025b4f0829d8e1b42e8b846914659632,0xba18ded5e0d604a86428282964ae0bb249ceb9d0,0xfe01a216234f79cfc3bea7513e457c6a9e50250d,0x0c05ec4db907cfb91b2a1a29e7b86688b7568a6d,0xc4cf565a5d25ee2803c9b8e91fc3d7c62e79fe69,0xe04cf52e9fafa3d9bf14c407afff94165ef835f7,0x77afe94859163abf0b90725d69e904ea91446c7b,0x19d599012788b991ff542f31208bab21ea38403e,0xca582d9655a50e6512045740deb0de3a7ee5281f,0xd05e6bf1a00b5b4c9df909309f19e29af792422b,0x0f00294c6e4c30d9ffc0557fec6c586e6f8c3935,0xeb2b00042ce4522ce2d1aacee6f312d26c4eb9d6"
accounts5 = "0x7ae92148e79d60a0749fd6de374c8e81dfddf792,0x554f4476825293d4ad20e02b54aca13956acc40a,0x9cf36e93a8e2b1eaaa779d9965f46c90b820048c,0x4756eeebf378046f8dd3cb6fa908d93bfd45f139,0x091933ee1088cdf5daace8baec0997a4e93f0dd6,0xa0efb63be0db8fc11681a598bf351a42a6ff50e0,0x8b83b9c4683aa4ec897c569010f09c6d04608163,0x550cd530bc893fc6d2b4df6bea587f17142ab64e,0x828103b231b39fffce028562412b3c04a4640e64,0xe35b0ef92452c353dbb93775e0df97cedf873c72,0x0518f5bb058f6215a0ff5f4df54dae832d734e04,0x0e86733eab26cfcc04bb1752a62ec88e910b4cf5,0xb8b6fe7f357adeab950ac6c270ce340a46989ce1,0xeddf8eb4984cc27407a568cae1c78a1ddb0c2c1b,0x7145cfedcf479bede20c0a4ba1868c93507d5786,0x2fa9f9efc767650aace0422668444c3ff63e1f8d,0xd57479b8287666b44978255f1677e412d454d4f0,0x4baf012726cb5ec7dda57bc2770798a38100c44d,0x67fde691b11e96083fc52a5b74d73f0695811a3b,0x5fc90190177ea60c0aebc0ee7157541399b46d10"
def query(account):
apikey = 'ZF51552PZUZZNIB3AWHT183I4VYPZG17EA'
q = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + account + "&tag=latest&apikey=" + apikey
r = req.get(q)
_r = r.json()
_r = _r.get("result")
return _r
# q = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + accounts + "&tag=latest&apikey=" + apikey
# q2 = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + accounts2 + "&tag=latest&apikey=" + apikey
# q3 = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + accounts3 + "&tag=latest&apikey=" + apikey
# q4 = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + accounts4 + "&tag=latest&apikey=" + apikey
# q5 = "https://api.etherscan.io/api?module=account&action=balancemulti&address=" + accounts5 + "&tag=latest&apikey=" + apikey
# r = req.get(q)
# r2 = req.get(q2)
# r3 = req.get(q3)
# r4 = req.get(q4)
# r5 = req.get(q5)
# _r = r.json()
# _r2 = r2.json()
# _r3 = r3.json()
# _r4 = r4.json()
# _r5 = r5.json()
_r = query(accounts)
_r2 = query(accounts2)
_r3 = query(accounts3)
_r4 = query(accounts4)
_r5 = query(accounts5)
_r = _r + _r2 + _r3 + _r4 + _r5
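# Illustrative aggregation (added; not part of the original script). Assuming each
# entry of the balancemulti result has the usual Etherscan shape
# {"account": <address>, "balance": <wei as a decimal string>}, this sums the balances.
total_wei = sum(int(entry["balance"]) for entry in _r)
print("accounts queried:", len(_r))
print("total balance (ETH):", total_wei / 1e18)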
| 221.086957
| 4,419
| 0.914749
| 380
| 10,170
| 24.428947
| 0.365789
| 0.005171
| 0.010988
| 0.012281
| 0.950124
| 0.950124
| 0.950124
| 0.950124
| 0.943876
| 0.943876
| 0
| 0.523303
| 0.021042
| 10,170
| 45
| 4,420
| 226
| 0.4091
| 0.522026
| 0
| 0
| 0
| 0.263158
| 0.915342
| 0.893867
| 0
| 0
| 0.867231
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.052632
| 0
| 0.157895
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
16bcb1ab4c1b73b5d93272c8f2d769065f67d62c
| 1,847
|
py
|
Python
|
test/test_commands.py
|
nens/threedidepth
|
dbe38acc745202f39741b607000f3d3b1611d434
|
[
"BSD-3-Clause"
] | null | null | null |
test/test_commands.py
|
nens/threedidepth
|
dbe38acc745202f39741b607000f3d3b1611d434
|
[
"BSD-3-Clause"
] | 16
|
2020-10-08T14:49:19.000Z
|
2022-02-28T15:58:33.000Z
|
test/test_commands.py
|
nens/threedidepth
|
dbe38acc745202f39741b607000f3d3b1611d434
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from unittest import mock
import sys
from threedidepth import commands
def test_command(tmpdir):
depth_path = tmpdir.join("waterdepth.tif")
depth_path.ensure(file=True) # "touch" the file
with mock.patch("threedidepth.commands.calculate_waterdepth") as wd:
args = ["threedidepth,", "a", "b", "c", "d"]
with mock.patch.object(sys, "argv", args):
commands.threedidepth()
wd.assert_called_with(
gridadmin_path="a",
results_3di_path="b",
dem_path="c",
waterdepth_path="d",
calculation_steps=None,
mode=commands.MODE_LIZARD,
progress_func=None,
netcdf=False
)
args.append("--constant")
with mock.patch.object(sys, "argv", args):
commands.threedidepth()
wd.assert_called_with(
gridadmin_path="a",
results_3di_path="b",
dem_path="c",
waterdepth_path="d",
calculation_steps=None,
mode=commands.MODE_CONSTANT,
progress_func=None,
netcdf=False
)
def test_command_with_multiple_steps(tmpdir):
depth_path = tmpdir.join("waterdepth.tif")
depth_path.ensure(file=True) # "touch" the file
with mock.patch("threedidepth.commands.calculate_waterdepth") as wd:
args = ["threedidepth,", "a", "b", "c", "d", "--steps", "1", "2", "3"]
with mock.patch.object(sys, "argv", args):
commands.threedidepth()
wd.assert_called_with(
gridadmin_path="a",
results_3di_path="b",
dem_path="c",
waterdepth_path="d",
calculation_steps=[1, 2, 3],
mode=commands.MODE_LIZARD,
progress_func=None,
netcdf=False
)
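# The mocked argv above implies a command line of roughly this form (file names are
# hypothetical; only the positional order and the --steps/--constant flags come from
# the assertions in these tests):
#   threedidepth <gridadmin> <results_3di> <dem> <waterdepth> [--steps 1 2 3] [--constant]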
| 31.844828
| 78
| 0.567948
| 205
| 1,847
| 4.917073
| 0.278049
| 0.039683
| 0.064484
| 0.056548
| 0.848214
| 0.821429
| 0.821429
| 0.821429
| 0.821429
| 0.740079
| 0
| 0.007776
| 0.303736
| 1,847
| 57
| 79
| 32.403509
| 0.77605
| 0.029778
| 0
| 0.74
| 0
| 0
| 0.106264
| 0.04698
| 0
| 0
| 0
| 0
| 0.06
| 1
| 0.04
| false
| 0
| 0.06
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16c7abe823b990928d1630374a84d00e6615705f
| 98
|
py
|
Python
|
app/solver/__init__.py
|
muhammadzpw/python-8puzzle
|
e2fc5e790e8d5faa2e2fdb13a0a96b9e10f941ae
|
[
"MIT"
] | null | null | null |
app/solver/__init__.py
|
muhammadzpw/python-8puzzle
|
e2fc5e790e8d5faa2e2fdb13a0a96b9e10f941ae
|
[
"MIT"
] | null | null | null |
app/solver/__init__.py
|
muhammadzpw/python-8puzzle
|
e2fc5e790e8d5faa2e2fdb13a0a96b9e10f941ae
|
[
"MIT"
] | null | null | null |
from app.solver.astar import *
from app.solver.solver import *
from app.solver.bestfirst import *
| 24.5
| 34
| 0.785714
| 15
| 98
| 5.133333
| 0.4
| 0.272727
| 0.506494
| 0.493506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 98
| 3
| 35
| 32.666667
| 0.895349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bcb74410429a49642b124642e30add8117f0ab53
| 6,896
|
py
|
Python
|
interfacecosmology/halomassfunction.py
|
rbiswas4/FluctuationsInCosmology
|
ecc973cea3a2516dfcd8efca080824539e920277
|
[
"MIT"
] | null | null | null |
interfacecosmology/halomassfunction.py
|
rbiswas4/FluctuationsInCosmology
|
ecc973cea3a2516dfcd8efca080824539e920277
|
[
"MIT"
] | null | null | null |
interfacecosmology/halomassfunction.py
|
rbiswas4/FluctuationsInCosmology
|
ecc973cea3a2516dfcd8efca080824539e920277
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#import utils
import numpy as np
import psutils as psu
import massfunctions as mf
def ____dndlnM ( M ,
ps ,
z = 0. ,
khmin = 1.0e-5,
khmax = 2.0 ,
logkhint = 0.005 ,
bgtype = "cb",
powerspectrumfile = "LCDM_matterpower.dat" ,
cosmo = None,
deltac = 1.686 ,
fittingform = "Bhattacharya10",
**params ):
"""
returns the mass function dn/dln(M) in units of h^3 Mpc^{-3}
args:
M: mandatory, arraylike
mass bin in units of solar Mass
powerspectrumfile : optional, string, defaults to
LCDM_matterpower.dat
name of the power spectrum file from CAMB
cosmo: optional defaults to Planck13
cosmology model
returns:
numpy array containing mass function in units of Mpc^{-3}
CHANGES:
added argument deltac with default value 1.674
changed back to 1.686 R. Biswas
"""
h = cosmo.H0/100.0
sig = psu.sigmaM (M ,
ps ,
bgtype = bgtype,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
z = 0. ,
cosmo = cosmo ,
**params)
sigm = sig
dlsinvdlM = -psu.dlnsigmadlnM (M ,
ps ,
z = 0.,
bgtype = bgtype ,
cosmo = cosmo ,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
**params )
if fittingform == "Bhattacharya10":
f_sigma = mf.__fsigmaBhattacharya (
sigma = sigm,
deltac = deltac ,
z = z ,
A0 = 0.333 ,
a0 = 0.788 ,
p0 = 0.807 ,
q0 = 1.795 ,
alpha1 = 0.11 ,
alpha2 = 0.01 ,
alpha3 = 0.0 ,
alpha4 = 0.0,
Mlow = 6e11 ,
Mhigh = 3e15)
elif fittingform == "MICE" :
f_sigma = mf.fsigmaMICE(sigma = sigm, z = z)
else:
raise ValueError("This fitting form is not implemented")
rhobg = psu.__rhobg( z =0. , bgtype = bgtype,
unittype = "solarmassperMpc3", cosmo = cosmo)
#dndlnM = rhobg *sigm *dlsinvdlM /M
dndlnM = rhobg *f_sigma *dlsinvdlM /M
#dndlnM = dlsinvdlM *f_sigma/M * rhobg
#critdensity(h = cosmo.h, unittype = "solarmassperMpc3")*cosmo.Om0
#dndlnM = rhobg * np.ones(len(sigm))
#dndlnM = rhobg * f_sigma
return dndlnM
def dndlnM ( M ,
ps ,
z = 0. ,
khmin = 1.0e-5,
khmax = 2.0 ,
logkhint = 0.005 ,
bgtype = "cb",
#powerspectrumfile = "LCDM_matterpower.dat" ,
cosmo = None,
deltac = 1.674 ,
fittingform = "Bhattacharya10",
**params ):
"""
returns the mass function dn/dln(M) in units of h^3 Mpc^{-3}
args:
M: mandatory, arraylike
mass in units of solar Mass
ps: provide power spectrum tuple from CAMB at z = 0. The
required tuple is (koverh, ps)
cosmo: cosmology model of type FCPL
deltac : float
delta c value used in the fitting function
fittingform: string, optional defaults to "Bhattacharya10"
choices:
"Bhattacharya10": Bhattacharya 2010 fitting form with
values suggested in paper"
"MICE" : Crocce etal fitting form with
values suggested in paper
returns:
numpy array containing mass function in units of Mpc^{-3}
CHANGES:
added argument deltac with default value 1.674
"""
#z = np.asarray(z, dtype = float)
h = cosmo.H0/100.0
#rhocr = critdensity( h = h ,
# unittype = "solarmassperMpc3")
sig = psu.sigmaM (M ,
ps ,
bgtype = bgtype,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
z = 0.,
cosmo = cosmo ,
**params)
sigm = sig
#sigm = cosmo.growth(z=z)[0]*sig
#print sigm
dlsinvdlM = -psu.dlnsigmadlnM (M ,
ps ,
z = 0. ,
bgtype = bgtype ,
cosmo = cosmo ,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
**params )
#used this to figure out what problems were
#print z, sigm, dlsinvdlM , deltac
if fittingform == "Bhattacharya10":
f_sigma = mf.__fsigmaBhattacharya (
sigma = sigm,
deltac = deltac ,
z = z ,
A0 = 0.333 ,
a0 = 0.788 ,
p0 = 0.807 ,
q0 = 1.795 ,
alpha1 = 0.11 ,
alpha2 = 0.01 ,
alpha3 = 0.0 ,
alpha4 = 0.0,
Mlow = 6e11 ,
Mhigh = 3e15)
elif fittingform == "MICE" :
f_sigma = mf.fsigmaMICE(sigma = sigm, z = z)
else:
raise ValueError("This fitting form is not implemented")
rhobg = psu.__rhobg( z =0. , bgtype = bgtype,
unittype = "solarmassperMpc3", cosmo = cosmo)
#dndlnM = rhobg *f_sigma *dlsinvdlM /M
#dndlnM = rhobg *sigm *dlsinvdlM /M
dndlnM = dlsinvdlM *f_sigma/M * rhobg
#critdensity(h = cosmo.h, unittype = "solarmassperMpc3")*cosmo.Om0
#return dndlnM
#dndlnM = rhobg*f_sigma
#dndlnM = rhobg*np.ones(len(sigm))
#dndlnM = mf.__fsigmaBhattacharya(sigma = sigm, deltac = 1.686, z = z)
return dndlnM
def dndlnM0 ( M ,
ps ,
z = 0. ,
khmin = 1.0e-5,
khmax = 2.0 ,
logkhint = 0.005 ,
bgtype = "cb",
#powerspectrumfile = "LCDM_matterpower.dat" ,
cosmo = None,
deltac = 1.674 ,
fittingform = "Bhattacharya10",
**params ):
"""
returns the mass function dn/dln(M) in units of h^3 Mpc^{-3}
args:
M: mandatory, arraylike
mass in units of solar Mass
ps: provide power spectrum tuple from CAMB at z = 0. The
required tuple is (koverh, ps)
cosmo: cosmology model of type FCPL
deltac : float
delta c value used in the fitting function
fittingform: string, optional defaults to "Bhattacharya10"
choices:
"Bhattacharya10": Bhattacharya 2010 fitting form with
values suggested in paper"
"MICE" : Crocce etal fitting form with
values suggested in paper
returns:
numpy array containing mass function in units of Mpc^{-3}
CHANGES:
added argument deltac with default value 1.674
"""
#z = np.asarray(z, dtype = float)
h = cosmo.H0/100.0
#rhocr = critdensity( h = h ,
# unittype = "solarmassperMpc3")
sig = psu.sigmaM (M ,
ps ,
bgtype = bgtype,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
z = 0.,
cosmo = cosmo ,
**params)
#sigm = sig
sigm = cosmo.growth(z=z)[0]*sig
#print sigm
dlsinvdlM = -psu.dlnsigmadlnM (M ,
ps ,
z = 0. ,
bgtype = bgtype ,
cosmo = cosmo ,
khmin = khmin ,
khmax = khmax ,
logkhint = logkhint ,
**params )
#used this to figure out what problems were
#print z, sigm, dlsinvdlM , deltac
if fittingform == "Bhattacharya10":
f_sigma = mf.__fsigmaBhattacharya (
sigma = sigm,
deltac = deltac ,
z = z ,
A0 = 0.333 ,
a0 = 0.788 ,
p0 = 0.807 ,
q0 = 1.795 ,
alpha1 = 0.11 ,
alpha2 = 0.01 ,
alpha3 = 0.0 ,
alpha4 = 0.0,
Mlow = 6e11 ,
Mhigh = 3e15)
elif fittingform == "MICE" :
f_sigma = mf.fsigmaMICE(sigma = sigm, z = z)
else:
raise ValueError("This fitting form is not implemented")
rhobg = psu.__rhobg( z =0. , bgtype = bgtype,
unittype = "solarmassperMpc3", cosmo = cosmo)
dndlnM = rhobg *f_sigma *dlsinvdlM /M
#dndlnM = rhobg *sigm *dlsinvdlM /M
#dndlnM = dlsinvdlM *f_sigma/M * rhobg
#critdensity(h = cosmo.h, unittype = "solarmassperMpc3")*cosmo.Om0
#dndlnM = M * dlsinvdlM
#dndlnM = rhobg*np.ones(len(sigm))
#dndlnM = mf.__fsigmaBhattacharya ( sigma= sigm, deltac = 1.686, z = z)
return dndlnM
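# Hedged usage sketch (added for illustration; the file name, mass grid and cosmology
# object are placeholders, not part of this module). The docstrings above state that
# ps is a (koverh, ps) tuple from CAMB at z = 0 and that cosmo is an FCPL-type model.
# koverh, pk = np.loadtxt("LCDM_matterpower.dat", unpack=True)
# masses = np.logspace(13, 15, 20) # halo masses in solar masses
# mass_function = dndlnM(masses, (koverh, pk), z=0.5, cosmo=my_fcpl_cosmology)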
| 22.910299
| 72
| 0.629785
| 934
| 6,896
| 4.609208
| 0.171306
| 0.007433
| 0.018815
| 0.006969
| 0.925203
| 0.918235
| 0.914983
| 0.905459
| 0.905459
| 0.905459
| 0
| 0.051536
| 0.25435
| 6,896
| 300
| 73
| 22.986667
| 0.785687
| 0.517401
| 0
| 0.939759
| 0
| 0
| 0.075339
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018072
| false
| 0
| 0.018072
| 0
| 0.054217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcd6ec7b98415149bbe59ba0b377b106d95f151a
| 97
|
py
|
Python
|
OnePy/portfolio/myportfolio.py
|
sibuzu/OnePy
|
464fca1c68a10f90ad128da3bfb03f05d2fc24bc
|
[
"MIT"
] | null | null | null |
OnePy/portfolio/myportfolio.py
|
sibuzu/OnePy
|
464fca1c68a10f90ad128da3bfb03f05d2fc24bc
|
[
"MIT"
] | null | null | null |
OnePy/portfolio/myportfolio.py
|
sibuzu/OnePy
|
464fca1c68a10f90ad128da3bfb03f05d2fc24bc
|
[
"MIT"
] | null | null | null |
from OnePy.portfolio.portfoliobase import PortfolioBase
class Portfolio(PortfolioBase):
pass
| 24.25
| 55
| 0.835052
| 10
| 97
| 8.1
| 0.7
| 0.54321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113402
| 97
| 4
| 56
| 24.25
| 0.94186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
bceb8b728e66f3dee035f06dfd61e32824aa34f6
| 18,717
|
py
|
Python
|
sajawal tech doxing tool/Attack.py
|
SajawalAslam/sajawal-tech-doxing-tool
|
50bc547085a7febaa27c3787707fdfad4d1b7c64
|
[
"MIT"
] | null | null | null |
sajawal tech doxing tool/Attack.py
|
SajawalAslam/sajawal-tech-doxing-tool
|
50bc547085a7febaa27c3787707fdfad4d1b7c64
|
[
"MIT"
] | null | null | null |
sajawal tech doxing tool/Attack.py
|
SajawalAslam/sajawal-tech-doxing-tool
|
50bc547085a7febaa27c3787707fdfad4d1b7c64
|
[
"MIT"
] | null | null | null |
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xef\x11\x00\x00\x00\x00\x00\x18\x89\xdf\x78\x75\xec\xd7\xa0\xf1\x47\x3b\x71\x23\xa1\xf6\xf4\x9c\x00\x00\x00\x00\x00\x00\x00\x00\x1c\x93\x27\x95\x21\x51\x65\x81\x79\xc6\x6a\x47\xe6\xf0\xc3\xb9\xae\xc6\xf5\x86\xf5\x80\x78\xf7\xe8\x52\xd0\x58\xdc\xcd\x97\x8c\x2c\x33\xc4\x1c\x7e\x39\x9c\x7a\xaa\x39\xe0\x3a\x18\x53\xed\xa7\x33\xe0\xaa\x5b\xb6\xd9\x31\x4d\xaf\x33\xea\xc1\x98\xe9\x79\x24\x60\x4b\xe6\x14\x9b\x58\x8f\xd3\x21\xd6\xff\xb7\x5a\xc8\xa2\x26\x03\xd1\x6f\x19\xf1\x02\x4a\xbd\x1e\x26\x94\x01\x2c\x8c\xca\x1c\x30\x2c\xbc\x77\x4f\x5c\x8c\x82\x8e\x37\x9f\x40\x08\x38\x2f\xdb\xcf\xe5\xe3\xd3\x1a\x0d\x1c\xf9\x3d\x0a\xf8\x1d\x34\x10\x87\x87\x53\x9c\xba\x7e\x98\xd7\x5a\xeb\xc7\x0f\xb2\x85\x69\x4f\xb7\xf9\x67\x48\x82\x57\xa4\x8f\x12\x95\x1d\xca\x02\x37\x3d\x6b\x2a\x00\xa0\x0f\x81\x46\x6c\x13\x65\x6b\xc5\xab\x00\xf4\x2d\x1c\xc9\x35\xde\x1f\x70\x0a\x57\x30\x3c\xcb\x0e\x17\xbc\x1a\x0b\x99\xf0\x6c\xe3\xd9\x35\xa5\x32\x33\xeb\xe2\x61\xda\x1f\xfe\xe2\xe5\x50\xfd\x7a\x1e\xd4\x7d\xfd\x29\xa9\xe3\x3d\x8d\x28\x96\xb2\x1c\x38\x01\x2e\x68\x36\x66\x72\x6c\x06\xf0\x09\xfe\x81\xc7\x84\x83\x95\x47\x66\x6b\xd7\x08\x3e\x5f\xbd\xea\x1f\x36\x95\xe7\x79\x36\xdf\xe1\x91\x04\x46\x7f\xb3\x5b\x59\x02\xb3\x59\xfd\xc1\xf3\x9e\xc0\xcf\x2d\x78\xc5\x25\xb7\xb3\x5f\x9a\x18\x0d\xf0\x88\x4c\xe7\x2e\x7d\x29\x17\xdd\x64\x18\x5d\xda\x00\x1b\xb4\x68\x46\x27\xc0\x54\x41\x91\x85\x77\x29\xd0\x9e\x1f\x88\x72\x61\x42\x64\x30\x6f\x5c\x5d\x6a\x0d\xc3\x2d\xd5\x14\x91\xd4\x27\xdf\xb5\xe5\x14\x4c\x04\xeb\x36\x41\xb1\x4b\xce\xc2\x9a\xc4\xd3\x46\x3f\x05\x93\x8f\xae\x2e\x5c\x95\x3f\xc5\xcd\xf8\x26\x48\xf0\x81\xca\x4f\x33\xbb\x95\xd2\xab\x5d\x2b\xdf\x5d\xb3\x0a\x82\x01\x0d\x4a\x8e\xab\xd8\xa1\x11\x7c\x48\x88\x9c\xec\xc7\xa4\x76\xf1\x08\x5d\x74\xb7\x51\x98\x9b\x8e\x50\xc4\x3a\xd9\xff\xd7\xfe\xed\xbe\xda\x59\x2d\xcc\xfc\x98\xb9\xd5\x92\x0f\x8b\x98\xb1\xfc\xee\x6a\x8a\x30\x2d\x71\x69\xb0\x17\xe5\x2a\xbe\x46\x69\x7e\x27\xb2\xae\x9c\x5c\x08\x84\x71\x95\xfc\xf6\xcc\x8f\xe0\x21\x1f\x80\x53\x21\xec\xd0\x7c\xb4\xf4\xcc\x18\xe1\xbd\x6f\x7a\x70\xac\x09\x31\x67\x63\x7d\x57\x64\x6d\x06\xd8\x39\xbc\x93\xd0\x2a\x6a\xad\x31\xf4\x0b\x14\xb6\x78\x25\x99\xd7\x21\x2f\x6d\x37\x57\xc3\x04\x42\xab\x00\xc5\x02\x29\x6d\xe2\x59\xe7\xc6\x7b\xf8\xae\x8d\xa3\xc3\x8e\xd2\xe7\x1b\xf4\x3d\xf7\x7c\xda\x13\x77\xa8\x0b\xbb\xad\x82\xbc\x68\xc6\x89\x0d\x0c\x6d\x45\x76\x9a\xad\xf5\xc0\x4e\x0a\x85\x80\x6a\xae\xcb\x8d\x22\xdd\x31\x62\x00\x42\xe9\x5a\xa8\x89\xa1\x24\xf5\xea\xcc\x9f\x4c\x18\x5f\xc0\x64\x2f\xe0\x60\x61\x3a\x64\x94\xae\x41\x51\x8e\x3f\xcf\x13\x3b\x27\xa0\x1c\x0d\xa8\xcc\x77\xca\x3e\xf7\x1c\xc5\xac\x9d\x9d\x70\xf6\x7f\xcc\xe7\xc5\x47\x06\x44\xf7\x8a\x0f\xbc\x55\x4a\x94\x6b\x9d\x04\xe1\xe3\xbe\x1d\x38\x44\x69\x1c\x9b\xb4\xda\xd9\xd7\xa6\x01\xbc\xb1\xdd\x67\x80\xcd\xf9\x9e\x7b\xdf\x24\x0b\xdf\x40\x6b\xc3\x8a\x64\x9a\xff\xf5\xd1\x0b\x15\x04\x8b\x2c\x5b\xe5\x63\x2a\xda\xb2\x80\xd3\xee\x29\x9f\x5e\x4f\x79\x3f\x9f\x35\xf8\x13\x33\x61\x1d\xbe\xe8\x14\xa6\x40\xfd\x2e\xc4\x55\x77\x7b\x59\x09\x9d\x59\xe7\x76\x5d\xf1\x0b\xc3\xc1\x94\x53\xda\x66\xdf\xa2\xd7\x7f\x61\x1a\xef\xcb\x11\x0c\xb9\x75\xaa\x2d\x72\x35\xb0\x9e\xf2\x73\xb3\x69\x1a\x6d\x9f\x29\x1b\xa2\x12\xf4\x4f\xb7\x55\x24\x90\xc8\x66\x41\xf4\x57\x69\x66\x13\x52\x91\x56\xb5\xef\x2b\xfa\x3b\x86\x00\xda\x7b\xfa\x05\xce\xea\x3b\x68\x6b\x08\xb0\x7f\xd9\xf2\xf1\x56\xb1\xf5\xbe\xa2\x19\x3f\x92\xcf\x07\x32\x25\xa9\x5c\x0d\x1d\xc4\xf1\x2e\xbc\x76\x59\xdf\x11\xbd\x85\x73\xfa\xb4\xb6\
xb4\x4a\xd7\xcb\x7c\xd7\x15\xc3\x80\x70\x62\x18\x4c\x44\x20\x64\x1a\xe1\x1e\x8e\x90\xe9\xbc\x4e\xdf\xea\xcd\xfb\x7f\xd7\x19\x65\x5e\xa4\x41\x65\xa2\x6a\xa2\xdc\x40\x0e\x40\xd0\x71\x0b\x00\xef\x82\x05\xf5\x5c\x93\xcf\x6a\x2c\x11\x5d\x02\x45\x3c\x6f\xde\x78\x3b\x46\xe5\x00\xcf\x9c\xb4\xd0\x3a\xfa\x05\x08\x95\xb2\x48\x89\x8e\x80\x4a\x6f\x62\xa4\xb4\xd8\xb1\xb1\x4d\xfe\xa4\x9b\xc4\x58\x98\xf6\x6f\x8b\x36\x43\x26\xf5\x4c\x92\x2c\x05\x94\x09\xa8\x44\x7f\x37\xc1\x04\x58\x61\x3c\x3e\x05\x83\xf2\x16\xb8\xd1\x66\xe4\xa6\x71\x40\xdb\x15\x60\xbb\x2f\x70\x31\x49\xcc\xde\x82\xcb\xb2\x19\xfb\xc5\xff\x9d\xb5\x2e\x6f\x94\x86\x64\x9a\x05\x77\xb8\x19\xd2\x4f\xe8\x49\xa5\xa5\xda\xd2\xea\xe9\xdf\xd1\x1b\x77\x60\x37\x74\x07\xec\x6e\xda\xf1\xe7\x95\x38\xd1\xc5\x7d\x93\x64\x95\x55\xca\x9c\x49\xb1\xdf\x0f\xcd\x58\x67\x58\x02\xc6\xa1\xfa\xf5\xdf\x37\x29\xc1\x9f\x66\x73\x8c\x6b\x77\x4c\xaf\x73\xd2\x19\x10\x1a\x26\xe1\x80\x7b\x19\x79\x3d\xb7\x85\xf1\x63\xf6\x6d\x8f\xdf\xdb\x68\x89\x23\xa6\x98\xbb\x3e\x58\xbe\x7d\x90\x51\x83\x08\xbb\x94\x82\x2f\xf2\xf2\xb3\x83\xeb\xa1\xb6\xbc\x3a\x42\xac\x46\x54\xa6\xb5\xad\x64\xbd\x32\xf4\x6a\x2c\x6b\xa7\x8b\x6f\xf6\x5a\x49\x1d\xff\xa7\x77\xd4\x32\x2a\xa3\xde\x59\xb7\x31\x74\x0f\x2d\x66\x00\x4e\x22\x97\xfb\x0f\x02\xe3\x1d\x67\x34\x7f\xe1\xcc\xef\x78\xff\x15\xc9\xd5\x3f\x00\x4c\x45\x13\xed\x35\x96\x04\xfe\xd9\x38\x30\x55\x8a\x80\x78\x7f\xaa\x20\x02\x9f\x5f\xc9\xf5\xf7\xe4\x66\x25\x7c\x47\xf4\x45\x13\x22\x57\x46\xc7\x47\xf8\xc0\xc8\xef\xff\xd3\xf0\xc0\x5c\xb8\xaf\xea\x3e\x62\x44\x72\xc6\x1a\x88\xe1\x93\xdb\xa6\x0c\x5b\xe8\x9b\xc2\x5e\x09\xda\xf2\xe4\x5c\xed\x40\x32\x17\xbc\x12\xb3\xa5\x7d\xb1\x2f\x29\x1e\xc2\x4b\x4a\xef\x07\x49\x92\x47\x1e\x08\x65\xb2\x91\x63\x37\xbe\x73\xb2\x93\x71\x5a\xec\x1a\x28\xd0\x8c\xbf\x52\x7f\xb5\x36\x79\x5a\x12\x9b\xb8\x94\x26\x7c\x49\x72\x04\xf9\x16\x08\x4a\x1b\xb9\xf7\xef\x2c\xe5\x8d\xff\x08\xef\x6d\xa9\xf0\x9d\x7b\x8b\xde\x93\x0d\x54\xe1\x5a\xa4\xe3\x83\x7f\x6f\xdd\x40\xdf\x80\x93\xf5\x90\xad\x50\x32\x93\x31\x2c\x19\x04\x2e\xe4\xef\x66\xa6\xfe\x67\xfd\xf0\xf8\xfb\x7f\x44\x16\x29\x3c\xa3\xab\xd0\x18\x9a\xcb\x94\xc6\x6f\xc8\xc6\xfb\xfd\x61\xbd\x95\x30\xdb\x79\x56\xea\x06\xf0\x39\xa2\x51\xab\xd3\x0e\xa7\xb0\xcb\x0a\xfc\x7f\xb9\xbb\x24\xed\x99\xaf\x15\x8b\x27\xa0\x32\xd6\x49\x5b\xf6\xac\xcc\xdd\xe4\xab\x11\x25\xdf\xa6\xb6\xf2\x44\x44\x23\x7f\x89\x04\xb6\x31\x68\x56\x49\xcb\x63\x58\x84\xf0\xaa\x06\xb0\xe4\x26\x6e\x22\xb1\xb7\xbc\xa9\x01\xf4\x54\x7a\xb0\xa4\x21\x6d\x9f\x03\xa2\xb4\x9f\xa9\xe6\x06\x74\x92\x75\xac\x66\x42\x0c\x5c\x59\x98\x5d\x55\xa5\x8d\xb0\x60\x7f\x2f\xda\xaa\x8b\xe2\xbf\x21\xd7\x75\xa4\x3c\x63\x83\x72\x9e\x1a\x5d\x39\x65\x66\x6b\x3c\x92\x6d\x8b\x58\x47\x2d\x0f\x32\xeb\x60\xcf\xab\x71\x86\x58\x01\xe6\x92\x47\xa3\x55\x62\x4f\xfe\x32\x87\x65\xcf\xf6\xc1\x8e\x9f\xfe\x1f\x7b\x56\xc5\x0f\x3b\xbd\xa2\xb1\xfd\x3d\x23\x9b\x44\xae\x54\xa4\x92\xa6\x9f\x5c\x69\x77\xc0\x37\xbd\x9e\x14\x29\x7c\xfc\x3f\x9a\x22\x5f\x34\xa5\x91\xbe\x3d\xd8\xf3\x93\xd7\x05\xf2\x1b\x68\x0c\xab\xb7\x45\xc3\x08\x5f\x3c\x4d\x53\xe4\x43\x16\x71\xdb\x1d\xbc\x8d\x32\xe0\xe7\x39\x29\x68\x0c\x6c\x04\x92\xf8\xc7\x2d\x5d\x6a\x33\x03\x59\xc3\xa6\xcc\xe5\xca\x15\xbe\xad\x77\xb0\xf1\xa3\x13\x73\x47\xf1\x10\xb1\xdc\xe5\xf1\x05\x67\x7f\x08\x7f\x3f\xb7\x54\x86\xaa\x91\xe5\xa6\x8b\x36\x56\xa4\xe9\x8a\x3f\x25\x6e\x9c\x1e\xaa\x89\xd0\xec\xe4\xc6\x1a\x6b\x36\xcd\x2e\x75\xdb\xb9\x05\x22\x83\xfb\x70\x89\x67\xcc\xd2\x78\x24\x60\x45\x66\xf4\x75\xb6\xd4\x2b\x22\xcb\x0b\xb9\xef\xa3\x64\xca\x40\xcf\x41\x23\x15\xfe\x4f\x56\xf7\x45\x10\x0b\xbf\xcb\x53\xd5\x59\x2d\x48\xcc\xd0\x9f\x66\xd8\xde\x3c\x0a\x54\x68\x41
\x93\x15\xfc\x7b\x9a\xcf\x21\xf5\x82\x38\xb9\xc7\x20\x12\x63\xf3\x84\x6b\x86\x35\x98\x61\xe9\x9f\xee\x87\x64\x37\x03\x2f\x3b\x8c\xf1\x7d\x82\x2d\x0d\xb5\x61\xe6\x7d\xff\x83\x04\xc1\x79\x68\xc7\xe1\x2f\x5f\x95\x7d\x47\x4f\x0c\x2a\x55\x3d\xd6\x1c\x96\x96\xef\x1d\x3f\xb3\x81\x3d\x21\xe4\x92\x8d\x29\xea\x0f\xcc\xbd\x27\x76\xe7\xd9\x71\xa2\x10\xf0\x07\xa7\x7b\x75\x38\x3b\xa4\x4e\x0f\x4e\xc7\x71\x23\xbb\x4f\x38\x29\x25\xa6\x05\xda\xaf\xb3\x23\xd4\x11\xb6\x4b\xf2\xf3\xe5\x5f\xbe\x42\x7f\x08\x06\xff\xeb\xa6\xf6\x34\x80\xc7\xec\x9a\xbd\xd1\xf0\xb3\x21\x68\x9d\x5c\xcb\x82\x5f\x97\x97\x75\x53\xa6\x50\x20\xed\xf6\x3d\x7d\xc7\xbb\xe8\xcd\x71\x8c\xb7\xd4\x4b\xeb\x7f\xa3\x8c\x8e\x3e\x36\x01\xf0\x7a\x35\xc8\xf3\x19\x06\x52\x4c\xc2\x3f\x07\x64\x58\xc0\x31\x48\xc9\x8c\xb9\xde\xcf\x36\x4b\xb8\x88\xbe\xbe\x76\x44\x28\x49\xbb\x7b\x25\xa0\x69\x9e\x56\x24\x22\x44\x7b\x86\x46\x08\x6a\x02\x35\xe5\x97\x10\x41\xb6\xbb\xca\xee\x89\xf0\x5f\x03\xc5\x43\x83\x91\x93\xe2\xc1\x67\xa0\x22\xc9\x2b\x4c\xa7\x02\x7d\x94\x0e\x8b\xe8\x1c\x6a\x70\x55\xf8\x7b\x18\xf5\x1b\xe4\x8b\x04\x2e\xfe\x19\x06\x62\x8f\x16\xca\xfd\xe7\x3b\x32\x0c\x24\xa3\xee\x3d\x33\xf3\xb1\x97\xe3\x5b\x6c\xea\x15\xbf\xa9\x5e\x5f\xd9\xf6\x7a\x31\xeb\xa2\xe8\xaf\x5a\x67\x2d\xc8\x31\x7c\x55\xd5\xb2\x63\x41\xfe\xd5\x40\xa0\xbb\x07\x41\x9e\xc9\x31\x50\x22\x52\x47\x90\x6d\x4e\x81\x15\x53\x1e\xb4\xa9\x36\x4c\xac\x65\xf3\x74\xf9\x30\x77\xf1\x15\x38\x0d\x5b\x3a\x94\x85\x30\x0d\x44\x4f\x73\x39\x20\x94\xe3\xb4\xed\xd1\xd5\xf1\x1e\x48\x5e\x61\xcc\xee\x04\x11\x0c\x7d\x54\xa9\x5b\xb9\x43\xa7\x9d\x00\xb7\x55\x44\xa9\xea\x22\x03\x26\xaf\x9e\xf4\x8e\x61\x8e\x29\x23\x9c\xff\xd5\x42\x4a\x39\x12\x2f\xeb\xf9\x26\x49\xcf\x08\xf6\x97\xd1\x17\xb5\x19\x68\xb4\xe7\xf9\x87\x43\xc2\x74\xcb\xe8\xc0\x06\xdb\x88\xe6\xdc\x39\xd3\xfd\x73\xb5\x1f\xe7\xbc\xbe\x99\xd4\x97\x5c\xd7\xba\xf1\x55\x9f\x7c\x4a\x05\x1b\xfd\xcc\xb0\x0c\x35\xcb\x95\x0d\xff\xf4\x38\x9d\xf5\x3d\x0e\x77\x9e\xe5\x27\xda\xdf\x31\x83\xb0\xd2\x32\xe2\xd5\x50\x2f\x4c\x07\xcf\xb8\x7f\x8c\x7f\xdf\x85\x67\xa0\xc8\x6c\xfc\x34\x82\x22\x9a\x45\x50\xf7\xad\x97\xdb\x57\x8b\x3e\xb2\xcf\xea\x42\xa9\x43\x9b\x90\x69\x3f\xe8\xc7\x40\xac\xf5\xa8\xbc\x5b\xef\x23\x43\xac\x98\xa4\x49\x20\x75\x4e\x52\xd8\xed\x67\x1a\xe4\x8b\x9a\xb2\x6e\x83\xaa\x39\x0c\x09\x70\xdc\xfa\x05\xd6\x50\xfe\xc5\xc5\xf8\xa7\x3c\xa7\x97\x58\x38\x0b\xe3\xf3\x16\x09\xcd\x99\x61\x9f\x39\xe4\x35\x9c\x9c\xc9\x9a\x5b\x95\x57\x23\x97\x61\x3c\x13\x55\x7a\xa5\x5b\x47\xae\x79\xf8\xb5\x18\x5f\x89\xb9\xa6\xa2\x89\x35\x89\x82\xb4\x43\x1d\x66\x0c\xce\x6b\x4d\xe2\xde\xff\x0e\x61\xc8\x1a\x86\x6f\x74\x67\x84\x8d\xf3\x5c\x70\xe3\x30\xd4\xd6\xf7\xfb\xd2\x3e\x4c\xf3\xd0\xeb\x7b\x68\x82\xd1\x93\x0d\x37\x58\x68\xd8\xd8\xd1\x9e\x7f\x0d\x6e\x77\xe4\xf4\x29\xe1\xa4\x0f\xf4\xbd\xed\x3f\x65\xdd\xc8\x61\xa7\xa8\x52\x79\xc3\x8e\xf7\xc6\x42\x31\x80\x6d\xa3\xa4\x8e\x3a\x82\xe7\xba\xa2\x05\xb3\xb6\xc9\x81\x3e\xf5\x70\x85\xb9\xd7\xe0\xcd\x43\x89\x91\x78\x8f\x65\x53\x25\xc7\x4d\xab\x97\xb4\x9e\xe1\x32\xce\x28\x24\xf4\xb9\x6b\x87\xcb\x1d\xa4\x19\xd0\x6e\x7c\x36\x9c\x35\x03\xbb\xfc\xf3\xe2\xbc\xac\x1e\x4c\xb5\x3a\x56\x09\x68\xbd\x69\x5b\x0d\x48\xea\x49\x6c\x85\x22\xad\x9f\x5d\xac\x1f\x2b\x29\x75\xbb\x84\x2d\x93\xa8\x13\x0c\x9e\xdd\x9f\xae\x6c\x21\x87\xbd\xa8\x0b\xb5\x39\x06\xb0\x5a\x20\xb5\x30\x6c\x8f\x90\x36\x9d\x21\x71\xe8\x5f\x44\x7b\xe4\x12\xcf\x22\xcd\x5f\x69\xf1\xdc\xd5\xeb\x24\xa7\xb7\xeb\x36\x19\xe3\xe1\x35\x35\x32\x32\x31\x02\x32\x7a\xe2\x2d\xf6\x63\x2c\xe0\x90\xc9\x55\x8b\x7b\xdb\xd5\x75\x16\xb4\x7e\xd0\x2e\xc7\x81\xe1\x0a\x2e\x87\x43\xf3\x4b\x0c\xee\xdf\x2f\xd7\xe4\x93\x94\xf4\xf5\x62\xd9\x26\xf1\xf
9\x01\x59\x1c\xff\xee\x1e\x97\x6f\x39\x15\xf1\x40\x29\xf5\xe3\x63\x55\x70\xb0\x49\xee\x73\x79\x0d\xcc\x86\x07\x24\x42\xc5\x24\xaa\x69\xaf\x82\x10\xaa\x33\x67\xf9\xfc\xdc\xa9\xdb\x8b\xc3\x4a\xe7\x7c\x1a\x0b\x59\x9f\xe0\xb9\x7b\x90\x5b\x0e\x2d\x00\x1d\x04\x6e\xfa\xc7\x97\x5c\xe8\x39\x0a\x08\xa1\xb2\xd2\x7e\x78\x9a\x28\x42\xca\x20\x61\x51\xca\x6d\x88\x63\x18\x5a\x01\x51\xef\xda\x3b\x38\xae\xe4\x2d\xe1\x6b\xb6\xc6\xd4\x60\x16\xa4\xcb\xa2\x1a\x3c\x7d\x71\x5e\xfc\x10\xe1\xff\x58\x99\xa0\xf4\x95\x87\xd7\x6b\xfb\x35\xc8\x14\x96\x9c\xc9\xe1\x7e\xc2\x73\x46\xe2\x9e\x51\x81\x8d\xec\x1f\x34\x80\x6a\x24\xd4\x7a\xe7\x65\x03\x1f\x5b\x3f\xc4\x35\x91\x16\xd1\x16\x7b\x54\xf7\x28\xbc\xce\x4c\x76\x14\x5d\x66\x0d\x6a\x70\x72\x7d\x71\x59\x72\x9c\x93\x72\xc2\xc2\xe6\x32\x3f\x75\x8f\xcf\xb9\x23\x15\xdd\x35\xaa\x70\x3d\x53\x73\x10\xe7\xd4\xce\xb9\x37\x98\x7f\x85\x94\x25\x8b\x25\xf0\xa1\xf9\x34\xf6\x1f\x8f\xde\x7f\xa8\x3b\xa3\xae\xc6\x4b\x21\xfb\x63\x1c\x57\x11\x43\x33\x9f\xc0\x48\x58\xa5\x87\xda\x75\x2c\x19\x14\x3e\x98\x8b\x51\x71\x1f\x57\x40\x40\xc5\x23\x6c\xaf\xcf\x08\xd0\x22\x8a\xa3\x81\x9e\x86\x93\xb5\x15\x0f\x00\x1f\x2f\xb4\x5e\xf3\xcf\xe8\x4a\x7e\x81\x4c\xa7\xff\xfd\x3c\xf2\xfb\x7c\x44\x40\x7d\xc2\x7d\x44\xd9\xfb\x9f\x89\x97\xd8\x87\x28\x69\x61\x71\x12\x2f\xb3\x48\x90\x8a\xf2\xd8\x73\x0a\xb3\x7a\xa6\x93\xd1\xdb\x88\x8d\x5d\x9d\x9c\x1d\x81\x6e\x81\x3a\xa2\x23\xab\xa6\xe6\x9c\x59\x95\x0d\x78\x31\x31\x13\x05\x75\xa9\x82\x49\x91\x8b\x45\x8f\xb2\x1d\xd2\xba\xcc\x3c\x86\xa5\xa4\x74\x19\x3a\x55\x30\x0a\x95\x6e\xf2\xfc\x2d\x8a\x9f\x95\xab\xb8\xf2\x82\x27\x99\xe1\x44\xbe\x64\xe5\xbe\x7d\xe6\xd8\x33\x75\xa3\x36\xc6\xc6\xf8\xe1\x4e\x08\x1c\xef\x55\xe8\x86\x4e\x96\x8e\xdb\x99\x6d\xc5\x01\x64\x0b\xbd\x76\xc3\x84\xff\x03\xa9\x7d\xbd\x77\x5c\xef\xcc\x15\x46\x9a\xe9\x9d\x6c\x4a\x26\x4a\x8a\xd0\xcf\x18\xbe\x2b\xff\x06\x7d\x99\x5e\x95\x8d\xf4\xe2\x90\x73\x85\xf9\x8b\xef\x82\x76\xef\x96\x84\xd6\x14\x0f\xea\xeb\x40\xc3\x47\x17\x1c\x32\x65\x93\xea\xba\xc8\xc2\xdc\xcc\x3a\x7a\x3f\xe8\x94\x94\x4c\x3a\x9e\x75\xd5\x86\xd3\x96\x3a\x4f\xaa\xd3\x2d\xd5\x67\x18\x3a\xec\x08\x49\x09\x8c\x84\x4c\x78\x55\x09\x2f\xb2\x58\xd0\xb7\x37\x91\x3a\x16\x63\xb9\xed\xea\x2c\x8e\x4a\x90\x5a\xf4\xa5\xbe\xe6\x77\x56\xf0\x27\x72\xf8\x90\x63\x3e\x7a\xea\x89\x07\x41\xb0\x1f\xb6\xe9\x2a\x75\x62\xde\xc3\xdb\xf9\x61\x4a\xd8\xc5\x90\xac\x93\x44\xdb\x04\xe2\x61\xef\xa5\x17\xbd\x1a\x1f\x79\xf3\x94\x6e\x24\x39\x45\x42\x98\x88\xb6\x80\xe9\x6b\x6b\x23\xbf\xe2\x7f\x91\x9c\x2b\x29\x59\x87\xc5\xfd\x7d\x31\x69\xfd\xfc\x86\xf0\xc9\x93\xb3\x1e\x50\xc2\xe1\x8f\x44\x92\x16\x8d\xe2\x3c\x1c\xf9\x19\x4d\x01\x6b\xd6\x11\x16\x0f\x88\x8b\x07\x0b\xab\x8d\xd5\x2e\x42\x4f\xe0\x88\xd0\x62\x42\xcb\x2d\x20\xb6\x13\xa3\x61\x1a\xd0\x13\x52\xd1\xa3\x2a\x4a\xeb\xa4\x2f\x26\x04\x31\x04\xc1\x8b\x36\x30\x30\x59\x3e\x74\x6b\x2c\x07\xed\xe2\xa0\x99\x5c\xdb\xec\x53\xd5\x19\x7b\x3d\x01\x3c\x7d\x38\x4d\x28\x2a\x80\x5c\x19\x33\x40\x12\xf4\x29\xdd\x51\xd0\x75\x04\x34\x12\x67\x02\x4c\x32\xe3\xf9\x4b\xde\xce\xf3\xa8\xd1\xf0\x39\xbb\xfd\x09\xc0\xfd\x4a\x35\x6f\x2e\x8a\x1b\x38\x1c\xa4\xcf\x41\x39\x00\x3a\xa1\xf2\xdb\xe0\x16\x9f\xcc\x29\x4c\x5e\xc0\xb7\xed\x07\xba\x03\x29\x7a\x4b\xe7\x99\x25\x14\x63\x7b\x92\x08\x13\xc2\x9b\x2d\x8d\xc9\x19\x24\xd8\x11\x62\x5e\xbe\xb3\x54\xa0\x69\xad\x61\x6c\xee\xbb\xb1\xae\x6b\xc8\xeb\x0b\xec\xce\xd3\x7c\xe1\x1e\x3d\x24\x61\xbe\x6a\xd0\xbc\xf1\x53\x83\xd4\x64\x96\x5d\xd3\x4b\xc5\x6e\xfe\x56\xeb\x67\xc3\x74\x6b\xdc\x42\x96\xe9\x75\x4e\x7b\x22\x26\x36\xb2\xe6\x14\x37\xcb\x7a\xb0\x14\xca\x46\x1b\xe4\x3e\x9b\xd1\x1d\xec\x67\x92\x91\x20\xeb\x6f\x1b\x79\xbf\x58\x69\x6e\x9f\x05\x
f0\xda\x68\xff\x2c\xc3\x63\x37\xcc\x49\x45\x2a\x0c\x6c\x9d\x22\x51\xb6\x95\x35\x37\x8d\x03\x38\xa6\x5d\x66\xb7\x9c\x96\xc9\x19\x63\xba\xf5\xa2\xf4\x50\x77\xb9\xc8\x8f\xda\x92\xf3\x67\xa0\x0a\x00\x5c\x8e\x11\x33\xc8\xe8\x68\xb9\xec\xf5\xaa\xd5\x29\x16\x98\x9a\xa6\x2f\x2c\x45\xfd\xbe\xdd\x33\xbf\x22\xb0\x15\xb4\x69\x79\x67\xbb\xc6\x45\x61\x22\xb5\xe5\x0f\x76\x39\x8a\x26\xaa\xa2\x3d\xee\xc1\x1d\x32\xf1\x11\xa0\x84\xd1\x2c\x81\xef\x55\x91\xb3\x4f\x05\xf6\x3d\x15\x9c\x81\x5f\xf4\xf2\x18\xe1\x43\x21\x21\xce\xfd\x06\xae\xdb\xe6\xec\x78\x18\x1f\x41\xad\x8b\xb7\x65\x51\x42\x3c\xb6\xe6\x7a\x5b\x50\x6a\x28\xae\xea\xc8\x87\x4a\x1c\xe6\x9c\xe0\x92\x35\xfe\xd7\xe3\x23\xd2\x89\x7f\xad\xa5\x54\xff\xbd\x04\x7c\x2e\x99\x4d\x79\x76\xd4\x6d\xe6\xa6\x40\xf8\x1b\xbb\xe0\xa6\xe2\x5b\xe5\x43\xd2\x0b\x3e\x25\xb4\xa5\xfb\x4b\xd1\x19\xe1\x7a\xc8\xe1\x92\x57\x43\xac\x24\xbd\xa5\x32\xbe\x03\x63\x76\x48\x34\x4d\xe8\xde\xe2\x2b\x63\x0b\xcb\xfa\x9f\x0a\x0e\x6b\xe2\x09\x41\x52\xf9\x31\x40\x3e\xa5\x50\x89\x34\x12\x49\xd0\x59\x95\x67\xa3\xf2\xf3\xe6\xa2\x3c\x9f\x54\xfc\x96\x52\xd6\xd8\x88\x05\x8a\x40\xc5\xf4\x9e\x73\x32\x47\xc1\x29\x7f\xb5\x05\xd2\x48\xb5\xa0\xa6\x8d\x66\x79\xb4\xd1\x6a\x9b\xb6\x57\x56\x6d\xa1\x46\xcc\x2d\x82\xb7\xca\x71\xb8\x2e\x24\xb4\x69\xeb\x49\xf4\x50\x85\x3a\x47\x10\x57\xd7\xac\x8c\xbf\xae\xde\x7b\x07\xde\xa3\xb5\xd1\x72\x6e\x84\xd3\x67\x19\xb1\xe6\xfb\x0d\x0f\x8f\x8b\x04\xa3\x26\xe5\x97\x94\xa2\x3a\xd7\x15\x1b\x29\x43\x99\x70\xf6\x2f\x0e\x34\x69\x8d\x56\x99\x11\x59\x64\x38\xde\xcb\x2f\x36\x4f\xc6\x02\x06\x30\x05\x7c\x93\x3c\xf7\x12\xd6\x96\xad\x55\x59\x88\x10\x57\x57\xc6\x94\xc8\x9f\x7f\x6d\x12\x51\xd7\xb5\x15\x27\xfd\x48\x2b\x89\xdd\x83\x6a\x5f\xc3\xf7\x2f\x67\x13\xf6\x86\x8e\xae\x8c\xe2\xde\x03\x6b\xf3\x47\xc7\xf9\x88\xdf\x49\xbc\xa7\x10\xce\x17\x1f\xbf\xd5\x11\x2d\x50\xd4\x63\x31\x1c\x4a\x6c\x25\xc8\xee\x71\xbe\x2b\xac\x98\x0e\x98\x10\x5d\xdd\x39\x78\xd3\x47\x4c\x5e\x73\xa0\xc2\x69\xa5\x5a\xb4\x34\xc7\x69\x30\x26\x7c\x7e\x16\x86\x96\x33\x72\x62\xc4\xfc\x44\x36\x47\xff\xe2\x14\x49\x36\x67\xe3\x49\xc7\x77\xc7\x7f\x4f\x75\xd0\x0e\xbf\x36\xc3\x28\x35\xb5\x85\xa0\x85\x0a\xba\xf8\x54\x6c\x75\xa1\xb1\x60\x5d\x53\xb6\x22\xe1\x0d\xe6\xdd\x4d\x92\x1d\x65\x64\x97\xd5\x4c\xcf\x17\xdd\x18\x1e\x6c\x67\x2b\x01\xc5\x20\x0e\x75\x2e\x34\xfa\xa0\x14\x55\xf6\xdc\x32\x6f\x38\x6e\x0f\x11\xb5\xeb\x5c\x85\xd5\x70\x76\x1e\x88\x96\xb7\x91\xe5\xdc\xb7\xa3\xf1\xc7\xf6\x10\x63\xce\x08\x19\xf1\x2b\xc3\x5e\x9a\x56\x9c\x60\x26\x22\x8b\x45\x38\x59\xf4\x7e\xbe\x36\x4d\x94\x69\x6a\x54\xb5\x35\x67\xa3\x6d\x14\x7e\x1f\xc2\x8c\x59\x40\x3b\x8f\x6b\xbf\x12\xf8\xa2\x63\x05\x7f\x4b\x56\x50\xd2\x48\x0e\xcc\x9b\xcd\x37\x48\xeb\x2e\xe5\x1f\xe4\x01\xbb\x5b\x96\x28\x47\x9d\xa3\x95\x0c\x48\x96\x02\xc3\x90\x51\x58\xf1\x22\x72\x3b\xd5\x91\xd0\x76\xea\x9e\x29\xcb\x5e\x57\x78\x65\x2e\x96\x2e\xcf\x59\xdb\x51\x24\x67\xbf\x30\x72\x7e\x78\xa9\x19\x2c\xc0\xb0\x0f\x0f\x4a\x4e\xa3\x12\xc9\x19\xf6\x79\xbf\x46\x56\xf2\x66\x7c\x17\x4e\x8e\x8a\xa6\xe9\x3d\x7b\xcb\xea\x84\x1d\xf6\xe3\x0b\x96\xb0\xa5\xa2\x8d\xd1\x3d\x4e\xd1\x5b\x15\x94\x5f\x3d\x36\x84\xa1\xbf\x61\x66\xca\xf5\x52\x69\xf0\xb4\x88\x6d\x39\x5f\x6e\x7f\xaf\xea\xfc\x7a\xdd\xc8\x02\x68\x35\x4b\x3a\x62\x6f\xf8\x90\x26\xdb\xfa\xa7\xb7\x23\xc5\x02\x55\xf8\x31\x64\xc0\x99\xa0\x9e\x41\x59\x36\x53\x74\x8f\xdd\x2a\x41\x8a\xa8\x52\x1b\x5b\x3b\xcd\x39\x33\x1c\xc6\x08\xde\xbf\x2e\x23\x22\x98\xc3\xd3\x6b\xb9\xe0\x4a\xc9\x8e\x57\x6c\xe3\xe1\xf7\xdb\x55\x30\x17\xb8\xaf\xcc\xc6\x55\x2a\x14\x45\x7c\x82\x8a\x7c\x26\xe2\xfc\xb5\xa8\x19\xad\x20\xd0\x56\x97\x29\x5a\x4e\xe6\xa1\xc5\x02\x39\xbe\x1d\x1f\x54\x74\xde\x6f\xd6\
x60\x0a\xf5\xe7\x5d\xe1\x06\x46\xc6\xb0\x7f\x6d\x5d\x0b\xed\x47\x22\xde\x9b\xd7\xe9\x53\xdc\xa3\xd4\xef\x9f\xcb\x0f\x75\xcc\xf5\x19\x9a\xd9\x83\x02\xf2\x3b\x8f\x6b\x67\x54\xc5\x9b\x4f\x82\x6e\x78\x18\x9a\x00\x84\x7c\x55\x41\x00\x78\xb8\x15\x0f\x58\xa2\xc7\xc6\x4b\x5a\x93\x3d\x54\x25\xbe\xe5\x58\xcb\x0f\x11\x82\x98\xd2\x06\xdc\x2f\x64\xb9\x5e\xd1\x8e\x4d\xf6\xff\x36\xd7\x10\xf0\xee\xee\x4c\xa8\x09\x20\x32\xbf\xf6\x23\xbc\x31\x1b\xf8\x31\xb6\x87\x0b\x05\x47\x0f\x94\x9f\x12\x75\x0e\x3c\x5a\x0e\xe4\x45\xd7\x47\x56\x6d\x70\x46\x6d\x51\xec\xca\x7c\x88\x1d\xfc\x67\x66\xc3\xfe\xf8\xfd\xf1\xa0\x9b\x5e\x62\x75\x59\x4e\xce\x7f\xeb\xa3\x28\x3a\x69\xb9\xbc\xe6\x08\x88\xd2\x15\x2f\xda\x37\x4b\x3c\xe9\x3a\x85\x18\xc5\xdd\x48\xef\x24\x8a\x59\x3c\xc5\x1d\x4d\x6e\xe1\xf2\x19\xc1\xc7\x79\xd8\xf3\x0f\x1e\x17\x6b\xef\xd7\xc3\x23\xc2\x9c\x39\xed\x26\x80\xe2\xbc\x96\x4a\x8a\x9c\x1a\xd2\xb6', 2)
| 6,239
| 18,659
| 0.750387
| 4,667
| 18,717
| 3.006428
| 0.056782
| 0.008125
| 0.008339
| 0.006842
| 0.00278
| 0.001711
| 0.001711
| 0
| 0
| 0
| 0
| 0.314608
| 0.000427
| 18,717
| 3
| 18,659
| 6,239
| 0.435352
| 0
| 0
| 0
| 0
| 0.333333
| 0.994764
| 0.994764
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
bcef1ecaff2afed43474645d90ce8865417b3b17
| 11,557
|
py
|
Python
|
testing/testing_script.py
|
ShawonBarman/Blood-and-Platelet-Management-System
|
0a1d4be41d42eca69dd8f8f3ed6ba7b15bcf5fc1
|
[
"MIT"
] | null | null | null |
testing/testing_script.py
|
ShawonBarman/Blood-and-Platelet-Management-System
|
0a1d4be41d42eca69dd8f8f3ed6ba7b15bcf5fc1
|
[
"MIT"
] | null | null | null |
testing/testing_script.py
|
ShawonBarman/Blood-and-Platelet-Management-System
|
0a1d4be41d42eca69dd8f8f3ed6ba7b15bcf5fc1
|
[
"MIT"
] | null | null | null |
from pyhtmlreport import Report
from selenium import webdriver
from selenium.webdriver.common.by import By
import time
report = Report()
driver = webdriver.Chrome(executable_path="F:\\Python program\\BPMS\\testing\\driver\\chromedriver.exe")
driver.maximize_window()
report.setup(
report_folder=r'F:\Python program\BPMS\testing\reports',
module_name='Report',
release_name='Release 1',
selenium_driver=driver
)
driver.get('http://127.0.0.1:8000/')
time.sleep(4)
#Test case 1
try:
report.write_step(
'Go to Donor Login Page for testing functionality',
status=report.status.Start,
test_number="18201043_1"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Donor')]").click()
time.sleep(4)
report.write_step(
'Entered Donor Login Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 2
try:
report.write_step(
'Testing Donor Login Page functionality',
status=report.status.Start,
test_number="18201043_2"
)
driver.find_element_by_name("username").send_keys("liton123")
print("Username writted")
time.sleep(2)
driver.find_element_by_name("password").send_keys("uapcse12")
print("Password writted")
time.sleep(2)
driver.find_element(By.XPATH, "//button[contains(text(),'Login')]").click()
time.sleep(5)
report.write_step(
'Logged in',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 3
try:
report.write_step(
'Go to Donate Blood page and testing Donate Blood Page functionality',
status=report.status.Start,
test_number="18201043_3"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Donate Blood')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(5)
report.write_step(
'Entered Donate Blood Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 4
try:
report.write_step(
'Go to Donation History Page and testing functionality',
status=report.status.Start,
test_number="18201043_4"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Donation History')]").click()
time.sleep(4)
report.write_step(
'Entered Donation History Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 5
try:
report.write_step(
'Go to Donor Blood Request Page and testing functionality',
status=report.status.Start,
test_number="18201043_5"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Blood Request')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(5)
report.write_step(
'Blood Request Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 6
try:
report.write_step(
'Go to Donor Request History and testing functionality',
status=report.status.Start,
test_number="18201043_6"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Request History')]").click()
time.sleep(4)
report.write_step(
'Request History Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 7
try:
report.write_step(
'Go to Patient specific blood Request History and testing functionality',
status=report.status.Start,
test_number="18201043_7"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Patient Request History')]").click()
time.sleep(4)
report.write_step(
'Patient Request History page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 8
try:
report.write_step(
'Go to donor profile page for testing functionality',
status=report.status.Start,
test_number="18201043_8"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Profile')]").click()
time.sleep(4)
report.write_step(
'profile page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 9
try:
report.write_step(
'Testing Log out functionality',
status=report.status.Start,
test_number="18201043_9"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Logout')]").click()
time.sleep(4)
report.write_step(
'Log out',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test case 10
try:
report.write_step(
'Go to Patient Login Page for testing functionality',
status=report.status.Start,
test_number="18201043_10"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Patient')]").click()
time.sleep(4)
report.write_step(
'Entered Patient Login Page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
# Test 11
try:
report.write_step(
'Testing Patient Login Page functionality',
status=report.status.Start,
test_number="18201043_11"
)
driver.find_element_by_name("username").send_keys("shawon43")
print("Username writted")
time.sleep(3)
driver.find_element_by_name("password").send_keys("uapcse123")
print("Password writted")
time.sleep(3)
driver.find_element(By.XPATH, "//button[contains(text(),'Login')]").click()
time.sleep(4)
report.write_step(
'Logged in',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 12
try:
report.write_step(
'Go to patient Make Request page and testing functionality',
status=report.status.Start,
test_number="18201043_12"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Make Request')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(5)
report.write_step(
'Entered Make Request page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 13
try:
report.write_step(
'Go to Patient Request History page and testing functionality',
status=report.status.Start,
test_number="18201043_13"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Request History')]").click()
time.sleep(4)
report.write_step(
'Patient Request History page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 14
try:
report.write_step(
'Testing Log out functionality',
status=report.status.Start,
test_number="18201043_14"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Logout')]").click()
time.sleep(4)
report.write_step(
'Logout page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 15
try:
report.write_step(
'Go to Available Donor page for testing functionality',
status=report.status.Start,
test_number="18201043_15"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Home')]").click()
time.sleep(4)
driver.find_element(By.XPATH, "//button[contains(text(),'Available donor')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(5)
report.write_step(
'Entered Available Donor page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 16
try:
report.write_step(
'Testing About us functionality',
status=report.status.Start,
test_number="18201043_16"
)
driver.find_element(By.XPATH, "//a[contains(text(),'About Us')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(4)
report.write_step(
'entered about us page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 17
try:
report.write_step(
'Testing search functionality',
status=report.status.Start,
test_number="18201043_17"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Search')]").click()
time.sleep(4)
driver.find_element_by_name("bloodgroup").send_keys("AB+")
time.sleep(3)
driver.find_element(By.XPATH, "//button[contains(text(),'Search')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(5)
report.write_step(
'search page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
#Test 18
try:
report.write_step(
'Testing Contact us functionality',
status=report.status.Start,
test_number="18201043_18"
)
driver.find_element(By.XPATH, "//a[contains(text(),'Connect Us')]").click()
time.sleep(4)
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
time.sleep(4)
report.write_step(
'entered contact us page',
status=report.status.Pass,
screenshot=True
)
except Exception as e:
report.write_step(
f'Something went wrong during execution!</br>{e}',
status=report.status.Warn,
screenshot=True
)
finally:
report.generate_report()
driver.quit()
| 25.739421
| 104
| 0.663927
| 1,475
| 11,557
| 5.104407
| 0.091525
| 0.078895
| 0.107584
| 0.063089
| 0.902643
| 0.870634
| 0.861336
| 0.827467
| 0.713375
| 0.679107
| 0
| 0.028161
| 0.204205
| 11,557
| 448
| 105
| 25.796875
| 0.790475
| 0.011595
| 0
| 0.658163
| 0
| 0
| 0.304893
| 0.086198
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.056122
| 0.010204
| 0
| 0.010204
| 0.010204
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4c0286a5b6ee6de2fe72778259b5c5177e0bdf8e
| 98
|
py
|
Python
|
src/linearAlgebra/vector/sum_of_squares.py
|
DiyazY/datascience-study
|
c738ba4ba1570f750ba5cb4bb31e4fb4a9e8d9ef
|
[
"MIT"
] | null | null | null |
src/linearAlgebra/vector/sum_of_squares.py
|
DiyazY/datascience-study
|
c738ba4ba1570f750ba5cb4bb31e4fb4a9e8d9ef
|
[
"MIT"
] | null | null | null |
src/linearAlgebra/vector/sum_of_squares.py
|
DiyazY/datascience-study
|
c738ba4ba1570f750ba5cb4bb31e4fb4a9e8d9ef
|
[
"MIT"
] | null | null | null |
from dot import dot
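# Sum of squares of a vector's components, expressed as the dot product of the vector with itself.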
def sum_of_squares(v):
return dot(v, v)
# print(sum_of_squares([1,2,3]))
| 16.333333
| 32
| 0.683673
| 20
| 98
| 3.15
| 0.65
| 0.15873
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036585
| 0.163265
| 98
| 6
| 32
| 16.333333
| 0.731707
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4c4e2b036ecbfdf7dd655079d5a88be757de6d83
| 6,529
|
py
|
Python
|
pyhouse/test/test_house.py
|
yusueliu/ut-pood-python
|
230fc0f744248d4c40e82f45544d4a8f1a28c870
|
[
"MIT"
] | null | null | null |
pyhouse/test/test_house.py
|
yusueliu/ut-pood-python
|
230fc0f744248d4c40e82f45544d4a8f1a28c870
|
[
"MIT"
] | null | null | null |
pyhouse/test/test_house.py
|
yusueliu/ut-pood-python
|
230fc0f744248d4c40e82f45544d4a8f1a28c870
|
[
"MIT"
] | 1
|
2020-04-30T14:21:48.000Z
|
2020-04-30T14:21:48.000Z
|
import unittest
import pytest
from pyhouse.lib.house import House, PirateHouse
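# Exercise the cumulative "house that Jack built" verses exposed by the House and PirateHouse classes.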
class PirateHouseTest(unittest.TestCase):
def setUp(self):
self.tale = PirateHouse()
def test_line_1(self):
expected = "Thar be the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(1))
class HouseTest(unittest.TestCase):
def setUp(self):
self.tale = House()
def test_line_1(self):
expected = "This is the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(1))
def test_line_2(self):
expected = "This is the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(2))
def test_line_3(self):
expected = "This is the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(3))
def test_line_4(self):
expected = "This is the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(4))
def test_line_5(self):
expected = "This is the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(5))
def test_line_6(self):
expected = "This is the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(6))
def test_line_7(self):
expected = "This is the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(7))
def test_line_8(self):
expected = "This is the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(8))
def test_line_9(self):
expected = "This is the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(9))
def test_line_10(self):
expected = "This is the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(10))
def test_line_11(self):
expected = "This is the farmer sowing his corn that kept the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(11))
def test_line_12(self):
expected = "This is the horse and the hound and the horn that belonged to the farmer sowing his corn that kept the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.\n"
self.assertEqual(expected, self.tale.line(12))
def test_all_the_lines(self):
expected = """This is the house that Jack built.
This is the malt that lay in the house that Jack built.
This is the rat that ate the malt that lay in the house that Jack built.
This is the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the farmer sowing his corn that kept the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
This is the horse and the hound and the horn that belonged to the farmer sowing his corn that kept the rooster that crowed in the morn that woke the priest all shaven and shorn that married the man all tattered and torn that kissed the maiden all forlorn that milked the cow with the crumpled horn that tossed the dog that worried the cat that killed the rat that ate the malt that lay in the house that Jack built.
"""
self.assertEqual(expected, self.tale.recite())
if __name__ == '__main__':
unittest.main()
| 70.204301
| 438
| 0.744831
| 1,164
| 6,529
| 4.146048
| 0.073883
| 0.02901
| 0.062163
| 0.082884
| 0.934107
| 0.900332
| 0.892872
| 0.877953
| 0.877953
| 0.866142
| 0
| 0.006233
| 0.213662
| 6,529
| 93
| 439
| 70.204301
| 0.933775
| 0
| 0
| 0.092308
| 0
| 0.276923
| 0.725727
| 0
| 0
| 0
| 0
| 0
| 0.215385
| 1
| 0.246154
| false
| 0
| 0.046154
| 0
| 0.323077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|