Columns (name: dtype):

hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
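Below is a minimal sketch, not part of the original dump, of one way rows with this schema could be consumed and filtered on the quality signals. The JSON Lines file name, the use of the plain `json` module, and the thresholds are assumptions for illustration only. The example rows follow, listed field by field.

```python
import json

def iter_quality_rows(path="sample.jsonl", max_dupe_10grams=0.5):
    """Yield (hexsha, content) for Python rows passing two illustrative filters."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            if row.get("lang") != "Python":
                continue
            if row.get("qsc_codepython_cate_ast_quality_signal") != 1:
                continue  # file did not produce a valid Python AST
            if row.get("qsc_code_frac_chars_dupe_10grams_quality_signal", 0) > max_dupe_10grams:
                continue  # heavily self-repetitive content
            yield row["hexsha"], row["content"]
```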
Row 1:

hexsha: 22d3d686400f893b4f476ac33b91bd166cceb117
size: 22,827
ext: py
lang: Python
max_stars_repo_path: Bugscan_exploits-master/exp_list/exp-back_1070.pyc_dis.py
max_stars_repo_name: csadsl/poc_exp
max_stars_repo_head_hexsha: e3146262e7403f19f49ee2db56338fa3f8e119c9
max_stars_repo_licenses: ["MIT"]
max_stars_count: 11
max_stars_repo_stars_event_min_datetime: 2020-05-30T13:53:49.000Z
max_stars_repo_stars_event_max_datetime: 2021-03-17T03:20:59.000Z
max_issues_repo_path: Bugscan_exploits-master/exp_list/exp-back_1070.pyc_dis.py
max_issues_repo_name: csadsl/poc_exp
max_issues_repo_head_hexsha: e3146262e7403f19f49ee2db56338fa3f8e119c9
max_issues_repo_licenses: ["MIT"]
max_issues_count: 6
max_issues_repo_issues_event_min_datetime: 2020-05-13T03:25:18.000Z
max_issues_repo_issues_event_max_datetime: 2020-07-21T06:24:16.000Z
max_forks_repo_path: Bugscan_exploits-master/exp_list/exp-back_1070.pyc_dis.py
max_forks_repo_name: csadsl/poc_exp
max_forks_repo_head_hexsha: e3146262e7403f19f49ee2db56338fa3f8e119c9
max_forks_repo_licenses: ["MIT"]
max_forks_count: 6
max_forks_repo_forks_event_min_datetime: 2020-05-30T13:53:51.000Z
max_forks_repo_forks_event_max_datetime: 2020-12-01T21:44:26.000Z
content:
#Embedded file name: wordpress_vule.py
if 0:
i11iIiiIii
import hashlib
import json
import zlib
def OO0o():
Oo0Ooo = json.loads(zlib.decompress(decode('\x1e\xce\xa4!\xf0\xaalm\xcb\xa1\xcd\xbec\x95;\x1e#r\x01p\xfa\xc8N\xf8\xa2\xc3\x9fP\x95\x88*\x82\xd0\xfdc\xa4`\x0b}\xd9\xab>\xf0\x83\xb2\xdb\x82l:>\x9bi:\x166\\\x03\x0e\xc9\xb2\xd9\x1e\xcd]\xb8\xc8\xde\xc6\xe3\x03\x9b\x8d\xb9\xc9\xf7|`E\xa2\xb5\x98\x0f\xb2\xb9\x11\xa65\x13Q\xef\xc3\x18\xddc\x86\x1d\x1b\x7f\xf8\x83\xab"&s7\xb0<[\xf1\xd7o\xaet$\xd9\xeb\t\x14\xe6\xc1\x18~\xb9d\x9aWW\xaa\xde\x87\xab\x92\t\t\xa9\'\xaf\xc4\x8cZ\xe9\x15\xccpP\xb3\xc9\xd1\x17I\x94\xb0\xcb\xc3@"\xceO\x94\xd0\xa6\'x\xe9-}&\x11x\xf3\x08w\xa1\x05;E\x07\x88\x06\xbf\xffT/\xbf"\x1b\xa9\xa1\x10\xe6\xfe3\x02\x9d\xca\xeclEj\x83\xdf{4\xab\xc5\x11\xbf%v\x19\x7fd\x9e\x19\x1f\xeb^x\x04\xa5-\xed\xb8*\xb5Y#\x12Qnl\x0e\xbf3z\xd4\xbd\xb1\xef\x7feE\xe6S\x97C\xb8\xa9\xc7$\xa2\xf5b\xf8\xe6,\x03\xf6\xfa9xM\x8b\x8b\xc8\xa4\x82\xa4\xe9\xfe\xc9-N\x98\x0f>\x0f\x8a\xce<\x80A\xf0\x00\xd1H*\xd4.\xa9\x1a\xf4C\x96\xd0n\xaeqb\x1c\xebY\x90>\x99\x8a\xcf\x03\x1f\r\xc8\xa9\x8b7\xe6\xfa5F\xbb\x83*v\\7&\xda\x8b;\\\xb1\xd1U\xb0\xe9+\xc3.\x1d\xdb\x14;\xac\xad W\xb7\x9aS\x9dO\x96%8\xf6o\x0f\x1e\xd7)o)\xa2B\xbd\x90\xbd\xa4s\xa9z\t\xa3\x14\xad\xd86:\x1cX\xbb\x8f\xb7 \x86\xfaNJ\xbf\xce\x8b\xfd\x88\x15\x0cE5\xe69@\x06\x1d\xc3\xb3\xc7\xc2ZQ\x1a\xd4Zrb~\xfb\xbf\xdd\x11\x98K9\x9c\xd1\xde\\&X\x08\xed\x15\x80\xfe\x1e\xa6\xfdS\x90\tdR\x9c%\xea\xd7d\x9d\xb4\xaf\xd4\xcd\xa60\xb8\xfdG\xa2 \xadq1\x7f\x1d\x1d\xccZo\xa5>\xd0\x9d\xf0\x8e\xa1k\xbf\xfd\xf9\xfe\x99\xdf\xb0\xea\xd3\x83\xad\xcb\xa3\xbbD\xa1t\xdc\x06\xc8\x04\x1ffr!\xfd=;\xef\xc9\xb1s\xac\x02`G\x99\x84O\x05\x0ev\xac(*>\xbd\x01\xf2\xcd\xba\xff\xbb-\x91M\xd0@\xa4\x85\xda\x00AQ(\xf0\x16\xc5a\xcb\xc3\xa3\xf5U\xc3\xa7\x03(\x9c\xa9\x1f\xca\x18\xdf@^\r\x91\x80\xae\x1a\xcc\xd9\xd5(\x0e\xd2Z\xe8\xc1\t\xca\xe0\xa1\xcc\xdaS\x1ex\x81\x98\x18D9\x80\x12o\xce\x92\xb3N\x02n\x8a{\x97\xcet\xe9\'\x87\xf8\x97G?\xb3\x99X\x88\x04\xa7\x1a\xfa\xc3r\xb8\x00gGGQ\xc6\xab\x1fA\x1aFS\xc5\x94\x88\x01\x87E\x08\xab#\xd3\x8a\x9b\xf5\x99\x95\x06\xbfc\xa9\x1e\xb7\xd1\xcdE\x13U\xe98fv\xf4V3\xdc\xabn\xa0\x8b\xc68M\xbfM:\xbc8*\xf6\x0e\xb5Ql\xcdIr1\xd4\xa2\xfcA\xf5K@e\x8e\xe5\xac\xaa\xa2\xad.\xc3t\x15\\s\x95>#\xea\x17\x96\x1f\xb6G\x07\x7f\xda\x87\xd3R\x98\xcdsSl\xad\x84\xcf\x11\x8d\x92\xc6\xefO\x16#\xa8+\xb6\xce\xc23\xc8h\xebZV\xbb#H;.\xa1\x06K\x87\x7f=\x9dQwi\xb6\xae\xee\xfb\x99Q>\x17Q\x81\x80\xbaF\xde\xdd\xca\xea\xa1\xc7\x8d<3\xef\x85\xd1\xf1\x90m4\xe3\xe9\xbd\x1c\x92aJO\x94\x1c\xaaJ\xdd\xf4l\xe8w?\xd6\xb6|\x93\xa6Po\xe3\xf6L\x1e\x93TA\xc0\x05\x83cu\x85\x1dn\xac\xac\xe2\x0e$\xdbH\xb2\xbfr\n\xc8X\x9d\xbfP3V\xb4\xefRVzn\n\xb2\x97\xa2P\xd7\x8f\xa0@\x8d\x9d~\x06\x0e+J\xe0d\x923\xe4\xfd)\xecYa\xcf\xe69\xd7\n\x85\xbb<\x86\xd6\xa9}V\x11\t\xa4\xba\'-X\xff\x96\xff\xa8.l\x7f\x13\xceR\xa0\x13`\xa6hLV&\xb08\xe8\x87\xf8\xb1\xc7R\xac\x90\x88\x0f\xb3Sp\xef\xbda\xd5C\nK\x16L\xb3u\xf4\x80\x1fae\x8a\x98o\x91\xc6\xfb\xbe\xa4\xe6\x1afc\xc0\x00\x80\x17\xf6\xfb\xf4[\xa3o\xb1#\xbf\xdb\xec\xd1R\x8e\xb3\xdb$p\xf8\xb2:\x03\xa8\xe8\x86o\xe6\x1b (tC\xd0/\x1f\xb5\x06\x85\x15\x0e\x10\xf9#\x13\xa3\xd9\x8c{W\xb9+\xb8&\xb40\x97w\x0e\xb8VM7\xfep\xadC\x8a&\xcf[$p\xc3P\x85\xf2a\x8cD\xe6\xb2\xfdV\x91\xc6{\x82"\xd0\x08{\xfb\xed\xc4 \xe4\xff(\x00/\xf8\xcb\xb6\x13V\x9f\xc0q\xc1*\xabU\xa3\x006j\xe1$\x87N\xe0\xd2\x83(\xf8LyC\xf1\xb8\x9dv\xe2\xfd:\x86\xf1%\x87\xfd7\x99u\xd5\xfb\xa8 
O\xdd\xde\xa0\xce\xfe\x1bL\xf3<,\xf2\x9f\x0fL\xdb\xaa\x99\xab\xa9\xe8\xe5\xc6\x19\xca\xd4\x11\x1f2\xb4\xd8E;\xb4v\xb64(\x17\xbf\xf1\xf2tz2\xcd\xa6\xa9n\\B\xae\xade\xde\xbd\x95\xef^mc\x10~S^\xc7\xd32\xe59\xb4m\x8c\x92\xc2\x900\xb2\xbdi\xddK\xd2\xe9\xa2\xe9U\x0e\xf6\xee>\xfa\x88\x83\x9aD\x92yK\xcd\x1cc&$_@\x95;@\xf1\xe2\x81J`]w\x1d\xd8W7\xa7\x13\xd2\xd5R6\xd8#\x82\xc9\x93\xb0\xffo2\x92s\xd2>M\xf8\x0c8.\xee\x11!\x15s\xef\xd6\xaf\xcf.\xe5\xbaV\xdf_\xcf\xe8\x8btc\x90K\tUpM\x8b\xeb\xcb\xedx3MK\xed\x82mX\xad\xf8\xe1z$5:V\r\xd3\t\xe4\xc5\x17K\x1b.\xbf\xd5\xf9Yh\x96\xf0\xdb\xdf2\x92\x92qn\xe8\'RdCv\xe1\xa6\x94_\xc6\xf5\xbf0"S\xb8\xc4\x1f\xecB\x84\xcfy\x97\x06\xa7b\xea8\x87\x0eGx\xde\xa8\xda\x05\xfe\xfa\xd0_8\xcc\x1c\xf871N\'\xee\xb08\xbec\xc6\xf5\x8c\xaa(\xd8\x9d,\xc1\xbb[\x80\xeaF\xf6E\x19\x07\xa0s?\xc8\xf4\x80op\x94\x03_\xe8\xb7\xee%\x0c<\x00\xfe\x98fY\rSI\x0b\x81]\xe6\x1e\xd6\xeb\x13\x86\xa2\xf0\xc7\xf9\xb9(S\xf3U\x92\x1c\xe7\x15\xceop{#W(\xdci\xbe\xf2\xa72\xc6b\xfb\x1bW\xc9q9\x92\x86\xf0\xac\x1a\x86\xed\xce.\x0e\xfe\x10\xa76(\xaf{\xc4(\xb5e&\xa2\x8ff\xc4\x11\x04K\x11,e?\x80s\xee\x81\x1c\xf9\x04\xdd\xaa\x94\xac\t\xfe=\xc3\x1c\xfd>d! \x10`\xba\x0e\x956\x91\xec\xd7R\x99\x1c\x1a\xa8\x1f4\xfa7\xcd\x87&G\xdfmbPBwx8\xd4\xcc\x06\xd1\xefp\xd8\x96\x80\xe1\xbd]#\x7fz\x80l\x81\xe7\x14\x03\xa4\x90q\xc7\x81$\x95\xca\x7f\x19V@)^+b\xf2\xb1\xae\t\x95\x99h\x0f\x92\xd1_c\x9f\x97\x13\x99*Aq\x0c\x9aL"\xf1\xa2\xc6\x1a\xcd\n8Q\xeb"\xb8\xe2\x10\xa4\xbd\x93\x97z\xee\xfd\x18U\x1a\x8a?C\x08\xc6\x1b\xac\xf3\x04\xc7~}AP\xd4I\xda.\xf9L(t\x89\xa7Y;B`\x83\r\x08\xf4\xa1\xd3\x12+9m\x83R\xf1z\x8c?|\xdc\xca\xa0\x12\xa2\xf3\x93\xab~\x84B6\x86\x88^\xfd\xe4n\x0c\x959\xab\xa3\xc8\xd5\x12<\xd8v~\xd5t\x85\xb2\x95\xe4\xb2\x9eNx\xd3\xfe\x97"&\xda`\xbc\xb0\xcd\x93e\xf6\x8c\x07f\x1f\x92\xffF`\xda\xa3\xdb\x19\x11\xf8\xfa\x0eL\xf7\x9e4\xecO[\xf0\xd2\xf3vv\x11L\xa5;3\xd8b\xe6-\xf4\xc2\xd0^\x1c\xf9\xbf\x1a\xa9\xe9\xb3\xf5\xc2\xcd\xfa\xfa\x04\x94\xe4\x1cC!\xf3\x06\x08\x83(`\xae\xb4\xcc7S\x12\xe4A\xb0Q\xd8\xb6\xd3\xf8LY\xb9\x1f"\xe4\x8b<\xeak^>9\x90\t\xbe+`\x99\x8eV\xbb\x85\xf5\xe5_\xa1s\xe4e\xb4\x08\xf0\xd7\xb5\x14\xefQ\x80%\x81\x8d\xcb\xc5U\xf8\xf0\x84\xc5H\x0f9ply\xc0\xc0_5\xa0\x85~5\x02\xb7=\x1a\x9ccwn\xe6\xb3A\xaah8\xdda\x07\xa4>1\xba\xbf\x11z\xd2(\xd3n\xb4%\x87\xd60\x03x\x02\xea\x1f\t\x98)\x90\x94\xeaH\x02t\x93\x1d\x07+\xa9\xf9\xae\xae.\xfc\xa4\xa9-]\xb04Sc\xeev\xb1O\xaa\xb2\\\x06\x11\xcd\x1d\x00\x97Sq\xf1?\xdbtP\xec2r&\x98)\xc7\x0f\xe2\xc1\xee\xa3\xbfA9N\xe87\xca\xfa\x06\x9f\x88\xe8q\x04g\x80\xf1\x952m_\xd5\xfe\x02d\xb1\x83V\xb5_\xcc\x8dJ6T\x1d6\xfc\xb3@\x0c\x87\x9b\xea^\x95\xe0G\xcf(&c&\xc7F\xea\xa0K\xa5u\x88\xf2sLmIZ\xf1\x02C\xa3X<\x82\x14\xb8z-\xf29\xd9\xb385\xe6E\x14EC\xa4\x81\xe5\xff\xcb\xbe\xa3\x824N\x08]\xc6\xc2I\x15\x10\xfc#\xf3A\x83:\xd5\x10\x1b\x03\x86@T\'t\x07\x95c\xf7\x08\xa9\xf9\xe2\x15"\x91\xf5\x94\xef\x84\xa6q\x9btt\xb9\xf8D\xc6#\xc0\xa0\xc6\xd1,\x89^6\xbe\x07\xbe\x12\xa4D\xc43\xca\xdc$\x89o\xd1\xec2\x84P\x1b\xf9\x88v\xe6u\xe0j\x95\xac\xb9B\xd4(\n\xe9\x13Z\xb2\x84QY\xfb\xdb\xb7+=\x8f\xbe\xac\xc3\x9c\xf0\xa7\xb8\xb9\x9c\xa3\xa3(\xdf\xf9\x95\xd7\xd7)\xb9\xf5\xabTh\x15\x97;\x04\xe3\xaf6\x9c{\x86\x159\xaf\xe5Jb\x84\x91\xf9Z!\xb6bH\xaf\x04/\xb9}(u\x00&(~X+H\xf7[\xad\x0b\xd2\xec1\xc3_\xda\xca\xe3\x07\x17D\xf5_\x01y\xb8|\xf2\xbd\xa6\x82_\xb5{\xf7)\x1e\xfcD\xbe}@\xd6Qc\x8eDA3GmJk& u>\x8b\xc7\xb9\xa5\xaai\xa5\x9dOC\x08\xec\xd6P5\xbf\x02\xf2\x1f\x0c\xc2q_\xd7\xe2\xb8\xa7\xe4f\xa1\xb5\x85R\rO{^\x1f\x8c\xc6\x82\x0b%uxLSz\xa73KN\xd7\xce\xff\xf3\x8eMI\xc3\xccd\xb3\x1dq\xc8!W\xa8\xee\x82r\x91f\xd0\xdf 
~\xdaK\xc1};\xedr\x84\xf0\t\xb3e{[\xc1i\xa4\xc1\xec\xdb\x82\x8c\x99\xb5\xa0\x0c\xfe\x93\xf6\xd0\xb2+\xe5\t\xc3\x12\x1b@\xeb\xd0o\xbbw\xe7\xfa\x01\xb0%\xfa\xa5&\x97\x19\xa6\xe0\x8e\xf1\xbf\xd9*\xb30\xf4@=k\xcf/\xf9\xe4wB\x0b\x8d\x9c+\x94\xb2\x1f)\xb4w\xd0\xd6\xd3\xd5s\xfa\xdc\x05\x1a\x87\x13`\xdf5\xc5J\x8d\x9d\x8f\xcd\xa1tY\xb8\x83Ce{y\xaf\x19\x0b\xc2\xa6\xd1+7\xaf#\xaei\xf6AI\xe5\x1ek\xc6 \x8c\xf1j\xa6\x15\xda[\x0ed\tom\xd9\xe9=\xb9\xad\x9c+\xa7\xfc\xb2\xfd\x82(\x16\xd1Z{H<\\\xa7BkZ\x8a\xce\t\xed\xe1\xea\xb7\x0bM\xb9I\xac\x9d~@\xe8\xc1\x15I\x13\x86\x06[T\x1ar8\xeeNQ\xc3\x96N\t\x96\x90DU\x82\n\xca\x7f\t\xb5\xa8=<E\xe5\xf9|!\xc1N`\x1a\xfe\x8b=5\x1d\x90\xb3\xd2\x02T\xf8\xa0\xa8I\xc4"f\x03\x96\xfb\x9b\x10{\xae\xe8\x98\xb4\xfcS&\xd9a\xde\x9b2,\xf5\xec\x04y\x87\x9a\xdd\xdcxR\x02;\xaa\xce\xb4\xe0\xc4\xb5\xfb\x00\xf8\xfaQ\xd3\xef\x92{)%\xbb[\xadd\r\x1a\x1f\x1b\xa9\xd4\xb8\x18\x07t\x98\x9c\xfe\x19\x83\x80\xce7E=\xba,\xde\x8e5(\xd9k\xe7>\xb0\x15\x94u\x94N\t\xbc\x1d\x10\x84\x07)_\x8f\xbc\x13\x83\xf5\xd1\xe5\xa3[8(\x80/\xafq4\'\xaf|\x06\x86\xb7\xbam9\xfe\xb50\xa7NT\xa3S|\x11E?\x16Hn_::\x87\xf9\'`n2')))
return Oo0Ooo
def assign(service, arg):
if service == decode('\x11{s\x1fm\xb6U)\xb6'):
return (True, arg)
if 0:
OOO0O0O0ooooo % IIii1I.II1 - O00ooooo00
def audit(arg):
I1IiiI = decode('')
IIi1IiiiI1Ii = decode('')
I11i11Ii = decode('')
Oo0Ooo = OO0o()
for oO00oOo in Oo0Ooo:
OOOo0 = arg + oO00oOo[decode('\x00}m\x1e')]
Oooo000o, IiIi11iIIi1Ii, Oo0O, IiI, ooOo = curl.curl(decode('Cg') % OOOo0)
if Oooo000o == 200 and hashlib.md5(Oo0O).hexdigest() == oO00oOo[decode('\x0bp4')]:
if I1IiiI < oO00oOo[decode('\x10qs\x08t\xab^')]:
I1IiiI = oO00oOo[decode('\x10qs\x08t\xab^')]
IIi1IiiiI1Ii = oO00oOo[decode('\x00}m\x1e')]
I11i11Ii = oO00oOo[decode('\x0bp4')]
if I1IiiI != decode(''):
Oo = decode('0qs\x08t\xab^z\xa8\xbd!\xfc\xeez') + I1IiiI
security_info(Oo)
o0O = json.loads(zlib.decompress(decode('\x1e\xce\xec\xe6p\x97\xeb\x12\x13\x9c\xa7\xd4\x9cE\x8b\xa9,\x0e\x03b\xf84\x1cr\xea\x10\x7fa\xec9\xd5\xe7\xff\xc0\xef5Lv\x1d\x1aFL\xe4\x0c\x17\x1a\xb0\xe5yG\xfb\xcd\x19O\xccKt\xfc\xb4\xa6j\xf8?G#x\xfb\xd1\xa6+\r\xfd\x9e\xb3\xa7`\xe0\x9d\xd7?\xcd\xfe[\x07U\xb5"\x966b\xab\x1b\xca/lz\xddZ%D\xe63oat\xc7#\x10w/\x1a\xff&\xd4"\xc5\x88TQ,H9\xa4\x81\x9a2\x1e\x9f\xcb\xff\xba*\x01\x1eAJ\xef;6~\x8e4\x91\x7f;\x7f\xe0\xbbx1n\xb3I6\xd8\x05\x1f \xc1}\x92\xf9\xd9\x92B\x1c\x81\xa4\x98\xba\x9d6\x1epH\x9aW\x03\xa6*,\xb4&\xbd\x8e\x04\x1d^\xee*\x06F\xbb\xaa}\xd9\xcbnk\xa5\xbe{\xc6\xf2Up\xca\xac\xe7\x87\x18\x84\xc7\xd9t\x9e!?\xdbb\xe7\xf4\x8e\xcc\x99\xce\xb2\x18*4\xe5\x0b\xd5\x00\xe0\x10\xb1S?#!\x84\xba\x90l{\xbeZy\xe2K6\x1dH~\xf8k\xb8\x9bH\xad\x14\xcd\xb0\xbf\x12\xcd=\x17\xf2\x9asOh\x92,\xb8&\xb4\x95\x90\x16\xe4\x90\xb5\x8f\xfe\xc4\xf5\xb6j\xac\xa2\x96\xb3zO\x7fM\x98\xd2\xc1\xe8l\xdd#_\x9ev\xa3\xb0\x93\xa1\x92\x91,z/\xbc>\x1b\xfdu-\xc5\xb2M,\xc2\t\xca\x17(\xe1\x14\xc3\xa1\xebGaYO\xbfb\xdaNB8\x84Cn\xb0\x12?\xd2\x0f\xbe\xa2\xfe6\x1f\xf0\x0fT\xdd\x88\x85C\x1c\xe7\xe0h\xd5\x10\xe53\xa48\x9c6@QX\x1a\x8c\xbc\xf2\x82YY5mKs&\x94\x0c+\xcfq|\n\xaf\xff\xad\xb2 vV\xa8\x8d\xa4\x0e\x06\x02SUM\x07\x9c5\xc8\x92\xf2\x11\xad}\x00\xc5u\x89\x0f\xb7\xa6\xba=\x9d\xba\xbe\xb7\x13D\x91j3.c\x9d\x85\x1e\x82\x17\xd0\xca\x82d\xcc.\x93\t\xe8\x87\x03\xc7H\xc4d;\n\x90\x18\x13\x8ev~x\xf8\xd4\xaa\xf4\xbfUU\xb0\xfb\xec<\t#SK7\xd4\xd8\xa5u\xf2\x87O\x18\x04E\x93`\x1f\tR\x90\x80[\xf53nZ\x82\xd0}\xdc\x1fD\x9ci\x04WM?1\x11\xc7j\x8f\xd5\xe6L\x0e\xe3\xfcR\xb8/\xdfIS\x1dP\xa8\xd7:8\x83H\xaf\xa3\x065)\xc56\xbaA+\xee\xc0\xfah\xc0\x1b4\xb7\x06\xc7\x12\xcb\x92#\x9f\x13\x08\x0e\x8d\xa2\x8d\xecY\xd8+\xdd\x89s\r\xea\x0b=\xb2a\t\x81:\xc6F\x99Y\xd9\xb0\xc6\x95\r\x8ey\xbe\x9b\xb7\xcf\x97\xc9\xdc\xafS\xe6?UaC&\xb9\xf90r\xd4&\x10\x90\xdf\xfbaY\xdeL\xd1\xcb\xb3\x1a\x8d\x9d\xb1\xe7"x\xa0R1\xf8\x1c\x15\xdfB4o\xf3e\x18\x9f\x0cvH\xcd\x89\xf7\x9c\x8b\xce%\x9e\xcd\x88\xe7%\x83?AP\x80\xa9*\x15\xd5\xba\xff\x82+?\x08t\xa9a\x88\x19\x9f\xab\xe3:\x06\xba\x8c\x100\xd9\xc6\x08Bc\xc5\xdeRy\'1\x88\xf4a0\xf7\xbf\x94\xf3-\x05\xe7\xd3\xb0\xbb\xa7\x02"V\xfd\x9a\xe5\xbe_Y\xed\x83\xdf\x81\xff[\xca\xb6\x9cck\x81\x89\xe6\x91\x04\x9d\xe8`\xf4*\xe5\x84\xf5\x8a[\x96\x08\xc0\xf2\xf9u\xdc\n\xd1\x0c\x9a[\xfa\nvm\r\xb4x\x83N5\x98\x93\xde\x06{\xae.\xcd\xa79\xb2ST?Py\xb03\x87\x85\xeb\x88\x93\xff\xb9:\xe3@\x855\xea{\x8b>>\xbdl\x850\x83F!x\xc8\xeb\x91\xbf\xd3\x05\x1a\xe9\xf7\x7f\x92J\xb8\'\xda\xa4l\xf6\xbe\xf32\x01\x8a\x8bi4\xfaV\xac\xdf\xbaL/\xa4(\x06\xbe\xbaG\xb6\xde\x83f*Ib\xa7\xc7w\x99\xbb\xe5\xca\x81c>\xf1{g\xf1\xdb\xe9\xd82\xcd\xfb\x87\xc1v\xa0\xb6\x91\x8aO\x96\xcf\xb4\x87\x16lXe\xf9\x0beb\xfb\x17m\xaa\x13\x0b62H\x17a\xbe2s\x9f\x0f\x0c\x96\t\x03\xe6\xaf$\xa7\xf2o\x99v\xeaF\xaf\xeb\t\x04\xa7\xb5&\xd5l\x99\xd5"Y\t\x85\xcd\x00\x9b\'\xc9}\x90x}\xbd\xf7\xa1e\xa0\xb1>\x88lp\xde\x16\xe0:\x12V\xe3\xc6\x91\xe4;D\x9cl\xd3\x01\x87\x9b@\xda\x93\xdbc\x06\xc7\xa9\xd2\x86#\x9bu\\\xe4\x84=\xf6\xd1\x19\xb7\xdd\x80\x1a\x00\xcay\x11e\xe2\xae\xd3\xa2>\xc1\xe6mU\x0f\xc4\x8f\xff@\xa2(T\\\xe1\x9a\xd2K\xd9\x02L\x11u\x18=i\xb5\xc6\xb9\xa0B\x015\x1e<\xfbN\x05r\xf1R\xd4\xe5\xb6\xe9\x12\xa5\xaa\xe05[?\x9b\xba\xb9b\x8fa\xa8\xce\xa8\xfa!\x8f\xbfhP.~n\xec}R\xca\xbc\x80g-\x12\x99\x89\x0f\xaa\xb0\xe1\x13F\xbd\xaf\r\xb9F]\x8a\x0b\xa3\x8d\xeb\x8a 
\xfcn\xb60\x8d\xe8X7\x8d\\\xd7C\xa5zR\x7f\xefV\x1a\xf8\x98o2\x7fP\x0f.m\xa5\xda\xaey_,\x92Aiv\x07r\xe7\x92\t\x1f\xcb[\x9d\xc2"\x88\x87\x9c\xe3\x9e\xf5\x931\tV\\\x03\xb2\xda\x85\xa5\xa7\x15\x08\x1c\x1b\xfeE\xaa\x03\xe5\x0e\x14\xc0\xcaV\x13\xca\x12\xb8s\xf6[\xaf\x05\x1cj\x94\xae\xde\xac\xec\x11_>\x9c\x10Tw\xc2\t\xbc\tHv\xe5b\x1e+aH\x8a_\xc0A\x8b.,\x11\xff\x16z\xf5\xe4\xdflQ\xc9\x7f\xa3\x1f\xeeA\xc6\x80\x99_[\xebd\xed\xca\x0c\x11(8\x16\xaa\x06\x80\xf0\n[\xad\x7fg\xf6P\x06\xdd\'2a|,PQ\x99\x01\x03O64\xb6Iw^\x85\t@@B1\xb0\xaeJv\x00Q\x98{\xc8\x8e\x19\xb3\x05\xe2p\x04\xe3\xe0\xc1\xa80\x98\xef\x04sP\xec\xc3\xe4]\xebn\x83\xae\xa4\xf7x\x18\xd3\x05\xa8\x8e!\\\x1a\xa5\xf2\xf4\xe1\xed\xc2\x98qOmY5/\t\x18\x14\x0b\xc6\xa7YG\x1e-\xf0r\xf6\xdf`\x8b\xd5\x0e,\xafM\x0bY~\\\x1do\xc9\\\x8f\xc6|\xb0\x95t\xa9\x9a.\xb1r=\xaa\xf1\x1b\xa2p\xb3\xa7\x0b\r\xb3\xb1.0\xe2"\xceNj\xac\xb5\xcd3\xf5#PI(=\xf3\x1d&\xf6\xb4\x1a\x1e\xd9\xf9s\x1d$\x1cTR\xdd\xe7^\x10\xacS2pV\x80\x97\xe14(n\xaf\x91\xbc\xa3\x93?2\xff9P\xfc3\xe5\x9a\x87\xcc\xf1\xc9\xb0\xa7\xe2h\x99W\x99\xc6\xe6\xbb$\xbdx\xf2\x04\x0b\x0c+\xc1\xa8\x835\xe5\xcf\xd7\xa7\x03\xe1\x01A\x0e\xd8\xc7\xa5\xe6\xdeuI\xe1\xdd\x14\x03\xf0\xaa~r\x19I\x7f\xa3\xd1~S\xec\xf4\xebE@\xca"\xc1\x0e\xf1q\x8a\x88\xd3\x7f\xe3\x9d\x9d\xd7F\xbf0\xfe\xa0\x91\x88R\x04\xf6s\xcf&\x15R\x92\xc9]fB\xdd\x03\x1crfY\xa0Q\x82+\xed{]\x1a\xa68p\x89\xe8\xb4H\x0cK\xbf\x98\xef\xce(\xa6\xd9\xcb\x13\xfb\x88\x04*Fq\'\xfa\\\xf60\x19\xa4\xc5\xc9\xb2\x1eg\x8d\x9b\x13\xa3\xfc\xf3Q\xcd`$\xb4\xbd\xedbn8\x8c\x19Y\x03\xb7D\xb7}\xb7I>{\x1dX\x14\xfba?")\x93\xeeq\xdfJWfh*s\x8a\x17H\xfa\x9d\xaf\x86\x90\xd0^g\x01\x85\x17\x9a$\xb4\x90&b\xa8wZ$\x19Z\xe7\xaaW\xf3^\x15\x98\x8d\x850\xc8\xe5=\xc6K\xce\xfd\xf5\xfa(x;Zr\xaba\x84I\x1e\xaa\x08:\xfd\xbaWUQ(Q<\x8a\xfa\x94+g\xcd\xe0\x9c\xfe\x8a=\xc3\x8b0\xc1\xcf\x8f\x08\x07\xb6\xaa]\xd2\xe4\xbe-%\xed{\x00/\xbb\xa3~1\xcdcD\xaf\xee\xc3\x127m\xddB\x89\x8dx\x07q\xbcTN<\x0e2\x90\x8dn-@*cM\x9c*S\xe4\xc2s\x1a\xab6;\xf1\x9b\xa1A\xb5\xca\x9a\xd9/|b6\x85\xf0~\xe2*\x94\xaf\xe0n\xe3\xe0y\xe5Pv\xfb4\x99\xf6b\xa7T1k\x91#5Yx\x9cwN\x80\xae\xff\xb5F\x17E\x8d[\xbcp\x07\x87\x05\x9d\xc3"\x17s\x01F\x0c\t}\x9fEPz\xddwN\x12)\r~\xc01&\xff*H$\xf2\xd8\xbdJ\x81\xee\xf4a\xbbO\x85\x1f^d\x03\'\xe2\xa30h\x80\xb52\x97\xe4q\xa3]\xbd\xf5:V\x04(\xc4\xd9&\xd9\xa0\x91\xd3 .\x04\x13\xff\x19\xe9\x11\x1b\x00B\x91\x01\x1f\x03\xbe\x1b\xcaK\xee\xb1\xd0\xd4\xf1\x88j]\xe8.W\xca>\x12\xa0^b\xd9)\xec=\x10F\x0e\xf8\xaa\xa3W\xb7\x8b\xd4\x144\xb2\xea\xb1cK\xb2j\xde8\x95\xaf\x87\xda\xc7\xd8\x8e\x8a\x86R\xc3\xe6\xd42\x11\x0b:\n\xe4\\\x00\xad\xd4\xe3\xff\xc7\x91{s\xd0S\xdds\x8c\\\xe1\';\xd1p\xe9\xa6\x8d\x84\xec"\xbbH\xc0\xd2\xf1*\x85\xf0\xf98g\x8a\x0f\x08\xb2\x97\xa3\x8aA\xc5k\x944\r\xd1s\x07\x9b+\xd7\xf0\x0c\xe0\xfb\xe3\xce\xccu\xae\xe7\xdcL9<\xaeJ\x9bp\xd5i\xabwo\x14\x0f\xa4W\xd0\xd1\xa6\x9aPv\xbdn\xf9\xbc\x82\xe9hU8f\xdf\xd8-$s\xb9]18\xe5\xe9\xc2~\xaa\x13\xf2\xbaa\x1a\xbe\xa3\x9d\xd9\x95\xb9\xc6v\x04\x85\xd5\xcf\x81ma2\xc2j\x97\xb9\xb6\x1c\xb4\x1a\x84\x96p/\x06 
\x869jE\x98\xa8\xfa\xaa\xba\xee\x88u:V)-\x04\x81\xff\xb0\x12y\xbc\xfcqr\x009\x9a\x1a\xbe\xb8\xf24\x0b\xa6\xcbc\x9bq=\x8b\x12\x82;$\xc2\xd6\xf1\x0c\xe92\xear\xe4]\x95\xae\x1e\x94\x92+--\x95zG\xf4\xc3\x12\xdd\xedj\x06\xc5\x1f\xc8\xbd\xe6g\xf3\xe6\x12K\xdc\x86A\x80E\xabjn\x80\xa7?\x9d\x8e\x19\x87\x95\x8bT\x06\xee\xf0qyDdw\xc8\xc1\xda\xd83\x100\x10\x94.\xc2\'k\xfc\xad\xb9=\x8aP\xdao`\xba\x82\xdf\xf7\xc2\xe5m\xb1\xea|b\xd7C\x02\x9b\x89b~\xac<\x92\xc2\n\x95\xa5\xa8\xb1\xf6WZ\x80\xc5\x08\x1e\to{\xdc?\xce\xda\x91\x02\xb5\xed}5\xbd=\x12?R\xb6D\xed\xbc]#l\r\x9a\xfeT\xc3$}up\xaf\xc8\x1c1\xfc\x1b\x1ei\xad\x12\xf8{\x95\xd2\x836\xe3\n\xe2\x02\xa3\xd4\x8b\xfa\x08\x07&\xf7\xdd\x84w.\xd5:@\xc8|\xb9`\x84D\xdb\x13v-?!\xed\xe3\x05\xd5\xa9\xa5j~\xee\x0c\xde\xe9R\xe8\x92\x1c.\':\xa4\xc9\xb4\xda\xa0\x10\xba\xb8\xf7*\xbf\xc9Z\x06\xe6\x97\xd5\xa2\xafd\xb3lnv\xc5\xa8\xdbD\xa5\xc8\x12\x978\x0b6#\xf3E\xc3F\xc8\xce\x87gp\xfb\x9f\xb5C:\x8c\xd0wF\xa0\x7f/]VV\xc7\xf2v\xff\xcaS*L\x08\x06\xfeCXY\xcbx\xaa\x9e\x12\x0c\x1fy\x15>\x16_\x93\xdeG"e\xf6\xf0\x94!\xd8W\xa8,\x90_\xfc\x06\x94V\xbayRZS)&\x93W\xaa\x92\xd8`\xdf(R\x95\xa0K\xd8\r\xa4\xf3\x8e7\x1a\xed\xee\xcf\x08\xcdl\xda\x0c\xad7|\x1e\x99I\x0f\xfa\xa80\xd2\xe0e\x9d\xd4\xc7\xe5\xf8}\xb0\xfd\xca$x\x1d\x7f\xbe*\xa6Qk\xe4\x10v\xeb4\x9d\xf6c\xe7T\xad\xe6]4H)]\xd1\xeb\xff\xbd\x96{\xd16C\xa6\x1e2)\x04\xa1e}\x94\x14\xbe1g/\xc2\xa0\xbee\xa7+\x87\x96\xeag\x07\xf2*\x95\x0e\x13\x97>1\x87\xc6\x9e\xdf\xed\xe05W&\x80\x14\xe9\x9f\x06"\xcd\xfd\xc4\xf4+<\x060\x95\xfe=\x0b\x95\xa9\x99D\xd6k\xa1\x9d\xaa\x12z\xc8\x7f\xdaw\x1ax\xb9\x97I;\xe2\x0b\x9e\xd5m\x9c9;\x92\x93Y;\x887X\x99\x9elD\xdc~\x8d\xed\xbe\xc6\x95\xb4\xc4\xa5\xe0\xcc\x11\x00\x1eL\xedr9\x8cG\x83\xe6\xa8\xc0\x9d\x12\x9f1e\x91\xdb8\xdc\xc8/\xb7\xb4\xcd\xcb\xff\xa2\x92E\xc0f\xba\xf3<\x87=\xa2T\x17\x9e\xc6\x1fg\xb7\xc0\xd2\xa3\xff\xd7\x91\x7fs\xd1\x13\x9b\xad\xa0\xa1h"\x16\x90;9\xd49\x1e\xc7M\x18\xba\x9b\xd3\xb8M\xa4fe/\xadv\xc1Wg\xdb\x96\xd9\x8cQ\x99\xc8b\xb0\x7f\xd0\xb9r\x08\xb9\xca\xce\x19\xbc\xc3^t\xa4\xa8Yc\xc0\x9f\x05\xa1\x08k\xdb\x96\xd9\x8c\x91\xd8\xe0,\xb4\x13\xd3\xd9&\x1a\xeb$\xcbW\x10d\x01Q\xe3"\xd2i\xfc\x85\x9f\xbd\xbe\xbd\xce\xabNg*ee\xd4#{\'\xcc]M\xb9\xc1f\xcb\t2#,\xa4f\x0bM\x8a\x99]\x08\x1d\xd5I:\xfd\x97\xf3\xc4\xa2\xcf\x18\x89v\xac\xe5\x06\x9eC\x04H\xe5\x88\xd2\xa0\xeb\xe0}\xc1\x84:\x9e\xe0-\xef\xc54^d\xb0d1\xef\x96NA\xb1M\x1f^\xf2r\xcb^zp\xcc\x8dp\x10l\x13\xc9\x117\xcb\x86\x8b\xee\xc9\x92\x15\xdf\x01m\xcc\x03\xcd\x9d\xe9\x9c\xcd\xe8\x84\xddq\xf0\x026\x85\x01h\x9bL\xee\xe7\xed\x9a\xe8\xd0~2F1\xc6@C\xb0\x85\xef\x8d\xd6b\xe8-\xd9\xbd\x05\x07\xa8P\xb0\xdc\x07\xec\x9b\x1e\x00\xf9B\\\'u6*w\xa3\xb7\x8d\xdf\xfeD\xe7\xbb\x9e4\xc2\x03\x1c\xb8\x1bS\x99\n\x9fx\xb3\x16\xa6\xba\'\xec\xaa\x85d\xc2\xac\x9c\xc3@@\xd0\x85\xe3\r7\x9dH\x168\xc4\xad\xa3]\x18\xa8\x1a\xac\x16\xf4\xff`\x1cU\x015\x0e\xe3#pR}\xf2\x97s+\xa3\x15\x18m\x85\xe5p\x99n\xfa\xe1\x9c\x85J\xc58n\xf6\x1a\xf3\x81X\xae\xa1s\xc29\xa0&\x13\xb5\xdc\x13\x07\x9eq|\xf1%\xe9\xa9\xcf\xa3\x95!\xcf\x1c\xa9X\xca\x9e\x16\x10\x12Bn\xd5.\xa9Nz\xe0\xd3\xbd\x82x\x844lj\xf3\x86\x87\\~\xc9f\x08\x8d\x8a\xa9]\x041\xfd\x1d\x87"\xb3\x9d~T$\x1a%v\xb5\xa0\xcf3\x1a\xf9\t\xd3\xb0\x9e\xb2\x96\xed\xcfP\xee\r\xb5\x98\x19q\xa3\xbc\xd1b\x14\xa3\x01r\x0c\x04=\x9c\x95PNJQ:\xb5\xfd\x1f\x03v\xe9\xe0tYH\n\x16\xb820\x08\xf0]0\x11\x7f\xb3\x80\x10\xe4\xba\xf8\x16\x15v\n\xcb\x1d\x1d\x90\xa3.\x90>\x8f\x05i~\xf4\x15\xea\x8b\xb5\r\x84\xa4}T\x00__o}\xe0\xacf\xd7\xea\x81 
\xb5\xd1\xde\x85\xa11\xb8.C\xd9\x84\xc5z\xc1R@\xaaL\xb6Iv\x1e\x85y\xbc\x84\x18J\xcfXz\x8f\x01=E\xf4\xff\xd0\xad\xb9&{\x18\xa7\xd2\xc4,F^\xa0\x11MZ5\x00S\x1e\xa1\x90VQ]j\xceE\xeb7\x9fy\xd9\xf3\x8a\xf7\xd0\x0e\x1c\xdf\xbd\xb1.\x94\xe9A88\x95\xe4\xcb\x01R\xf3(\x9d\xd2\x93\xffr|N\xb4\xc3f\xa3J+G\xf5n\x80SzS\xbd\x1ek,\xfe\x1d2\x1a;\x08\xd4\x0fk\xfc1\xa3\x1b\x8e\x9dO\x10j\xd7\xcc\xe7\xd7\xec$\xbb\x13\xc4\x86b\xec\x8f\n8\xf1+\xbc\xbf\x03c\xc8\x7f\xab\xfb\x97\x12\xf7\xac\x039@[|A\x80\xaa\x01\xe4\x0e\xf6\xec\xd4\x9c\x8ec\xf9\xd4&\xc5\xeeM\xea,\xf9\xa4\x16\x90\xcf\xc7pED\xa5\n\x1b;\xc4n\n\xef:\xb4\x80\xc4\x87JeH\x95J\x9b\x0f|\xf49Rt*\x04jw]\xb4j\x1ev\xa0L\xf3\x16\x81\xac\xdbV\x10*\xf8;\x1b[\xfd\xa7\'\xb64\x00\'\xaa\x80^\xd4\n\x08]\x1c.=\xdfw\x89K\x07?\xcf\xc1<_&0\xb2H6\x1e\x95y\xb8\x9cnss>\x99f(q\xd5D\xb3\xe3&\xfdx\x10\x06f\x1d?+\x04\x7f@\x1bou\xb5@\x13\r\xde\xb4\\d\x1dO@e\xa9\x91z\xfc\xfa\xf5\x0c\x94\xf7\xc6\xd9\x11\xd3\xd1\x81\x82<\xe1\xc7\xb8$\xaf\t\x12\xfc\xa0\x00t\x10\x9bA,\x894\xba\xe0\x9f\x86\xdd\x8c\x82\xfd|\'\xb0\x0e\xcf\xa3u\x9d\xe7\x86\xe4\x1a\x05\xde\xab\xc8Wz=obUj\x83\xe3\xecU\xfb\x83n\x9f ~\xccX\xd3|\xbe^\xb4\x8f-n\xd7N\x86\xe3\x87jJ\xd2?\xd2L\xf9\xbd2\xf5\xd9X\x08aI\xa1\x03ML\xb1r\x9a"*\x98\xfb>\x14\xfe\x97\x86u\xc3\x98\x8d\xee]\x84|\xe8M\x99\x02\xcbk\xe49\x02\xf8\xd1\x82pF\x86\xd7\xa3\xc0\xce\xf0x0\xf9\x97\xb9#9\t\x96\xc0\xed\xe9%C%S,\xdcco\xea\xce\xbeb\xcf\xc5\xdcm$\x17[\xc1\x18\x7f\xdc\xa1\xae\xae\xbbn\xfdX\xff\xe7\xcb\xbe\xadf\x8el\x90\xe5\x97\x0br~J\x80\x93\xf3\r\xd2\xd5\xde\x9a\xb5\xf6\xfd2\x02\xa4\xafU^;\x8a\xab\xa7\xb2\x95k\xb0\xb6N\x97<\x92&8\xe1\x91\xac\x1a\xaf:\xd4[Lwt9l\x04zO\xff\x98ap :]\x07\xb4\xe6GY.\xc0\x1a\x88+\xc4n5]\xa0\xc9\n\x0c\xfb\xf1\xc1\x11+\xaf<\xd5\xef)\x83\xf7:4Y\xe9|\x99V\xf1/\xaa+]\xbf\xfe!\xd7\xfc;\x9d\xc2\xef\xdc\x0b\xed\xc6\x93\x02\xb7\xd6T\x00\xfc\x0c\xb8G\xa6\'<\xac\xebz)\xb8\x1c\xeb^c\x8b\x05\xad\xae\xe1\xc5\xefU\xc3w`\xdd\xfbRp*\xc3\xc3f7(zuQ\xa1\x99\xf8\xdeu\x0b|<\xf6\xda\xdcr\xdcx\xf7\xef\xc7S)\x95&\x9e\xae4\xb6\xaftW\x1d\x114\xa0\x83\x03\xd7\xffG\xaeq\xed6~pj\r\x8d1y\xca\x9abDu}%\x8b\xea\x89\xd7\xb8\xf3E\xa8\x86T@:=\xa9\xd2\x06\xfd\xcc\x14!\x98\xb7\xd4\xf3T\xa4\xcd\xe7\'\x97\xe9\x13\x9dAeo\xa2\xab\n\xf8OI+E\xcf\x8b\xd6l\xc6\xd9l\x92\xf0\x99\x8c\x8dA\x93\x03\xb9\x94\x87J\x08\xe7[O\xa5np{\xc6\xb3\x9f\xc6\r\r_]\x15eB\xdan\xb2y\xf1\x1a\xdc\xe3\x8c\xf7\x8fE\xb8\x0f\x8b\xc2\xb0\xfcTT\xd3\xe02@\x11W\xc7%;\xc8\xa7\xd1\xa4\xf6\xdd\xdb%\x97\xacs\x0b\xc8?\xc8\xa93\x10u\x91^\xfe|(\'\xabix]\x8eoR\xad\x8f\xb60y\xce\xc5\xea\x18\x18\xd47X<z\x80\xf3\xcc\xbf\x81\xea@~\xa5\xad\x01\x11x\xc4\x12s\xc2\x14\xc7J!\xb2\xd7\x90\xb0\xbd%\xf4\x90\xc6_F:5`\xda9{b\xacFt=G\x0evW\x1d\x00\x03\xb8B18\x97\xee|\xaf\x9e\x8f\xcb\xa8\xdd0[OGJQ\xd0/ 
\xdbC\xd5\xd0\x16\xda\x0c*,\x89sB\x16\xf0L\xe62\x9e\x83\xf8\x18\xf0\xa6\xa6\xca\x90\xcbm\x9d\x8a\xd8_\t\xb2z\x1b?\'\x1e\x10\xb0\xf88W\x03\x8f\xb3\xc6$\xa1\x8b\x0f\x15j\x91\xeb\xf9\xc7\xa1o\x81N\xb0c\x08\xe4*4d\xf5\'\x92`\x83\x9f\xb93\x10G:\x80T\xff\xcd!c|H^\x06\xc66\xf5\x8e\x9c\xf5UxLDQk_L\xdc\xe5\x84\xb7\xb0]&u\x0e\x8d(Y\xfcR`A\xae\xbf\x8e\n\xce\xf6\xba%%\xf5\x95\x98^\xfaU>\x13Fkd\x89\xcey\x96\x07\x809\xfa\xc1\x8cZ\xc8\xd3X\xd7\r)\xc4\x05I\x80\xae\xba\xdb\x17%\x91\x02\x0b^\x16\x19\xb5\xbc<WkN\xa5\x7f\xf1/QS\x17KvtTZ\xdc\xcdW\x11K\tIJ\xe5\xf0tP\xc9\xc98\xbe\xb5\xb5\x117\xf0\xf0\x0b2\xf87\xa9\x84\x97\xbc\xe0<\xb0g#e-M\n+\x87\x15\xb5\xc2\xd5\xcf\xc2\xcd\xb8oa\x7f\x0fa\xc9\x1f`5\xbf\xffW\x14\x1en\x1b8\xc6\x88o+\x80\x0f\x07\x18z\xf9TD\x1b\xe1S\xe0\x91\xa2S\xbcQ\x17\x18:\xde\xf7l\x0f\x7f\xcaU\xaa1\xdbI\x91u\xb2\xad\xf2y\xf5\x9f\x03\xc1rKX\x8e\xa6\xf5\xc9g\x86 _>|\x9c_\x0c\xe7H9\xc9\xc6\x9f\xd6{\xed\x95\x97\xf9Q\xbb\x01r8i\xd4\x032/\xa5\xcaG:\xf8|')))
for IiiIII111iI in o0O:
if I1IiiI in IiiIII111iI.keys():
for IiII in IiiIII111iI[I1IiiI][decode('\x10am\x15x\xb6Q8\xac\xb01\xea\xe2?\x8e')]:
security_warning(IiII[decode('\x12}u\x17x')] + decode('lfd\x1dx\xb6\r') + decode('\x0e`u\x0bn\xfe\x1fu\xb2\xac.\xeb\xe74\x99j([\xfa\xeb\x80\xa8\xb0\x11\x88\xf4D\xaaF\xfc\x9e\x08\x12}d\x082') + str(IiII[decode('\x0fp')]))
if 0:
Ii11111i * iiI1i1
if __name__ == '__main__':
from dummy import *

avg_line_length: 475.5625
max_line_length: 13,343
alphanum_fraction: 0.72331
qsc_code_num_words_quality_signal: 5,054
qsc_code_num_chars_quality_signal: 22,827
qsc_code_mean_word_length_quality_signal: 3.258211
qsc_code_frac_words_unique_quality_signal: 0.276613
qsc_code_frac_chars_top_2grams_quality_signal: 0.004737
qsc_code_frac_chars_top_3grams_quality_signal: 0.001579
qsc_code_frac_chars_top_4grams_quality_signal: 0.002793
qsc_code_frac_chars_dupe_5grams_quality_signal: 0.013239
qsc_code_frac_chars_dupe_6grams_quality_signal: 0.008016
qsc_code_frac_chars_dupe_7grams_quality_signal: 0.004251
qsc_code_frac_chars_dupe_8grams_quality_signal: 0
qsc_code_frac_chars_dupe_9grams_quality_signal: 0
qsc_code_frac_chars_dupe_10grams_quality_signal: 0
qsc_code_frac_chars_replacement_symbols_quality_signal: 0
qsc_code_frac_chars_digital_quality_signal: 0.228298
qsc_code_frac_chars_whitespace_quality_signal: 0.019451
qsc_code_size_file_byte_quality_signal: 22,827
qsc_code_num_lines_quality_signal: 47
qsc_code_num_chars_line_max_quality_signal: 13,344
qsc_code_num_chars_line_mean_quality_signal: 485.680851
qsc_code_frac_chars_alphabet_quality_signal: 0.507394
qsc_code_frac_chars_comments_quality_signal: 0.001621
qsc_code_cate_xml_start_quality_signal: 0
qsc_code_frac_lines_dupe_lines_quality_signal: 0.078947
qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0.605263
qsc_code_frac_chars_string_length_quality_signal: 0.697837
qsc_code_frac_chars_long_word_length_quality_signal: 0.692615
qsc_code_frac_lines_string_concat_quality_signal: 0
qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0
qsc_code_frac_lines_prompt_comments_quality_signal: 0
qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1
qsc_codepython_frac_lines_func_ratio_quality_signal: 0.078947
qsc_codepython_cate_var_zero_quality_signal: false
qsc_codepython_frac_lines_pass_quality_signal: 0
qsc_codepython_frac_lines_import_quality_signal: 0.105263
qsc_codepython_frac_lines_simplefunc_quality_signal: 0
qsc_codepython_score_lines_no_logic_quality_signal: 0.236842
qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 0
qsc_code_num_chars: 0
qsc_code_mean_word_length: 0
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 0
qsc_code_frac_chars_top_3grams: 0
qsc_code_frac_chars_top_4grams: 0
qsc_code_frac_chars_dupe_5grams: 0
qsc_code_frac_chars_dupe_6grams: 0
qsc_code_frac_chars_dupe_7grams: 0
qsc_code_frac_chars_dupe_8grams: 0
qsc_code_frac_chars_dupe_9grams: 0
qsc_code_frac_chars_dupe_10grams: 0
qsc_code_frac_chars_replacement_symbols: 0
qsc_code_frac_chars_digital: 1
qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0
qsc_code_num_lines: 0
qsc_code_num_chars_line_max: 1
qsc_code_num_chars_line_mean: 1
qsc_code_frac_chars_alphabet: 0
qsc_code_frac_chars_comments: 0
qsc_code_cate_xml_start: 0
qsc_code_frac_lines_dupe_lines: 0
qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 1
qsc_code_frac_chars_string_length: 1
qsc_code_frac_chars_long_word_length: 1
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0
qsc_code_frac_lines_prompt_comments: 0
qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0
qsc_codepython_frac_lines_func_ratio: 0
qsc_codepython_cate_var_zero: 0
qsc_codepython_frac_lines_pass: 0
qsc_codepython_frac_lines_import: 0
qsc_codepython_frac_lines_simplefunc: 0
qsc_codepython_score_lines_no_logic: 0
qsc_codepython_frac_lines_print: 0
effective: 0
hits: 6
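The per-file statistics listed above (avg_line_length, max_line_length, alphanum_fraction) are simple functions of the `content` string. The sketch below assumes the conventional definitions of these metrics; the exact formulas used to produce the dump are not documented in the source.

```python
def basic_content_stats(content: str) -> dict:
    # Split on newlines; each element is one source line without its terminator.
    lines = content.split("\n")
    n_chars = len(content)
    return {
        # Mean and max line length in characters (assumed definitions).
        "avg_line_length": sum(len(l) for l in lines) / max(len(lines), 1),
        "max_line_length": max((len(l) for l in lines), default=0),
        # Fraction of all characters that are alphanumeric (assumed definition).
        "alphanum_fraction": sum(c.isalnum() for c in content) / max(n_chars, 1),
    }
```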
Row 2:

hexsha: fe06de440b8a2918e6cd7ea86e01bdd6693fcaa7
size: 59
ext: py
lang: Python
max_stars_repo_path: blackpearl/modules/motion.py
max_stars_repo_name: offmessage/blackpearl
max_stars_repo_head_hexsha: ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: blackpearl/modules/motion.py
max_issues_repo_name: offmessage/blackpearl
max_issues_repo_head_hexsha: ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: blackpearl/modules/motion.py
max_forks_repo_name: offmessage/blackpearl
max_forks_repo_head_hexsha: ffbba460fe7fc7fe4d7e3466f5ff13ea0c081fc5
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
from .base import Module
class Motion(Module):
    pass

avg_line_length: 8.428571
max_line_length: 24
alphanum_fraction: 0.694915
qsc_code_num_words_quality_signal: 8
qsc_code_num_chars_quality_signal: 59
qsc_code_mean_word_length_quality_signal: 5.125
qsc_code_frac_words_unique_quality_signal: 0.875
qsc_code_frac_chars_top_2grams_quality_signal: 0
qsc_code_frac_chars_top_3grams_quality_signal: 0
qsc_code_frac_chars_top_4grams_quality_signal: 0
qsc_code_frac_chars_dupe_5grams_quality_signal: 0
qsc_code_frac_chars_dupe_6grams_quality_signal: 0
qsc_code_frac_chars_dupe_7grams_quality_signal: 0
qsc_code_frac_chars_dupe_8grams_quality_signal: 0
qsc_code_frac_chars_dupe_9grams_quality_signal: 0
qsc_code_frac_chars_dupe_10grams_quality_signal: 0
qsc_code_frac_chars_replacement_symbols_quality_signal: 0
qsc_code_frac_chars_digital_quality_signal: 0
qsc_code_frac_chars_whitespace_quality_signal: 0.237288
qsc_code_size_file_byte_quality_signal: 59
qsc_code_num_lines_quality_signal: 6
qsc_code_num_chars_line_max_quality_signal: 25
qsc_code_num_chars_line_mean_quality_signal: 9.833333
qsc_code_frac_chars_alphabet_quality_signal: 0.911111
qsc_code_frac_chars_comments_quality_signal: 0
qsc_code_cate_xml_start_quality_signal: 0
qsc_code_frac_lines_dupe_lines_quality_signal: 0
qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0
qsc_code_frac_chars_string_length_quality_signal: 0
qsc_code_frac_chars_long_word_length_quality_signal: 0
qsc_code_frac_lines_string_concat_quality_signal: 0
qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0
qsc_code_frac_lines_prompt_comments_quality_signal: 0
qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1
qsc_codepython_frac_lines_func_ratio_quality_signal: 0
qsc_codepython_cate_var_zero_quality_signal: true
qsc_codepython_frac_lines_pass_quality_signal: 0.333333
qsc_codepython_frac_lines_import_quality_signal: 0.333333
qsc_codepython_frac_lines_simplefunc_quality_signal: 0
qsc_codepython_score_lines_no_logic_quality_signal: 0.666667
qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 1
qsc_code_num_chars: 0
qsc_code_mean_word_length: 0
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 0
qsc_code_frac_chars_top_3grams: 0
qsc_code_frac_chars_top_4grams: 0
qsc_code_frac_chars_dupe_5grams: 0
qsc_code_frac_chars_dupe_6grams: 0
qsc_code_frac_chars_dupe_7grams: 0
qsc_code_frac_chars_dupe_8grams: 0
qsc_code_frac_chars_dupe_9grams: 0
qsc_code_frac_chars_dupe_10grams: 0
qsc_code_frac_chars_replacement_symbols: 0
qsc_code_frac_chars_digital: 0
qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0
qsc_code_num_lines: 1
qsc_code_num_chars_line_max: 0
qsc_code_num_chars_line_mean: 0
qsc_code_frac_chars_alphabet: 0
qsc_code_frac_chars_comments: 0
qsc_code_cate_xml_start: 0
qsc_code_frac_lines_dupe_lines: 0
qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 0
qsc_code_frac_chars_string_length: 0
qsc_code_frac_chars_long_word_length: 0
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0
qsc_code_frac_lines_prompt_comments: 0
qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0
qsc_codepython_frac_lines_func_ratio: 0
qsc_codepython_cate_var_zero: 1
qsc_codepython_frac_lines_pass: 1
qsc_codepython_frac_lines_import: 1
qsc_codepython_frac_lines_simplefunc: 0
qsc_codepython_score_lines_no_logic: 1
qsc_codepython_frac_lines_print: 0
effective: 0
hits: 6
Row 3:

hexsha: a3b12fdf96afad56d8f6694fb12c23964d53be3f
size: 16,226
ext: py
lang: Python
max_stars_repo_path: src/py_gql/schema/differ/changes.py
max_stars_repo_name: johnpaulguzman/py-gql
max_stars_repo_head_hexsha: 5a2d180537218e1c30c65b2a933fb4fe197785ae
max_stars_repo_licenses: ["MIT"]
max_stars_count: 6
max_stars_repo_stars_event_min_datetime: 2019-04-30T10:48:09.000Z
max_stars_repo_stars_event_max_datetime: 2021-08-19T15:57:53.000Z
max_issues_repo_path: src/py_gql/schema/differ/changes.py
max_issues_repo_name: johnpaulguzman/py-gql
max_issues_repo_head_hexsha: 5a2d180537218e1c30c65b2a933fb4fe197785ae
max_issues_repo_licenses: ["MIT"]
max_issues_count: 6
max_issues_repo_issues_event_min_datetime: 2019-04-08T12:39:08.000Z
max_issues_repo_issues_event_max_datetime: 2020-08-10T15:00:18.000Z
max_forks_repo_path: src/py_gql/schema/differ/changes.py
max_forks_repo_name: johnpaulguzman/py-gql
max_forks_repo_head_hexsha: 5a2d180537218e1c30c65b2a933fb4fe197785ae
max_forks_repo_licenses: ["MIT"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2021-04-14T07:06:15.000Z
max_forks_repo_forks_event_max_datetime: 2021-08-19T15:58:46.000Z
content:
# -*- coding: utf-8 -*-
from enum import IntEnum
from typing import Any, Type, Union
from .. import (
Argument,
Directive,
EnumType,
EnumValue,
Field,
InputField,
InputObjectType,
InterfaceType,
NamedType,
ObjectType,
UnionType,
)
class SchemaChangeSeverity(IntEnum):
"""
Severity level of a schema change.
"""
#: Change should be safe for all clients.
COMPATIBLE = 0
#: Change could break some clients or create silent issues depending on
#: which part of the schema they use.
DANGEROUS = 1
#: Change will break most clients.
BREAKING = 2
# TODO: Add docstring describing the reasoning behind every change on each
# SchemaChange subclass.
class SchemaChange:
severity = NotImplemented # type: SchemaChangeSeverity
format_str = NotImplemented # type: str
@property
def message(self) -> str:
return self.format_str.format(self=self)
def __eq__(self, other: Any) -> bool:
return isinstance(other, type(self)) and str(self) == str(other)
class TypeChangedKind(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"{self.type_name} changed from "
"{self.old_kind_name} type to {self.new_kind_name} type."
)
def __init__(
self,
type_name: str,
old_kind: Type[NamedType],
new_kind: Type[NamedType],
):
self.type_name = type_name
self.old_kind = old_kind
self.new_kind = new_kind
self.old_kind_name = old_kind.__name__[:-4]
self.new_kind_name = new_kind.__name__[:-4]
class TypeRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = "Type {self.type_name} was removed."
def __init__(self, type_name: str):
self.type_name = type_name
class TypeAdded(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = "Type {self.type_name} was added."
def __init__(self, type_name: str):
self.type_name = type_name
class TypeRemovedFromUnion(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"{self.type_name} was removed from union type {self.union.name}."
)
def __init__(self, type_name: str, union: UnionType):
self.type_name = type_name
self.union = union
class TypeAddedToUnion(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = "{self.type_name} was added to union type {self.union.name}."
def __init__(self, type_name: str, union: UnionType):
self.type_name = type_name
self.union = union
class TypeRemovedFromInterface(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = "{self.type.name} no longer implements {self.interface.name}."
def __init__(self, interface: InterfaceType, object_type: ObjectType):
self.interface = interface
self.type = object_type
class TypeAddedToInterface(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = "{self.type.name} now implements {self.interface.name}."
def __init__(self, interface: InterfaceType, object_type: ObjectType):
self.interface = interface
self.type = object_type
class EnumValueRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = "{self.value.name} was removed from enum {self.enum.name}."
def __init__(self, enum: EnumType, value: EnumValue):
self.enum = enum
self.value = value
class EnumValueAdded(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = "{self.value.name} was added to enum {self.enum.name}."
def __init__(self, enum: EnumType, value: EnumValue):
self.enum = enum
self.value = value
class EnumValueDeprecated(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"{self.old_value.name} from enum {self.enum.name} was deprecated."
)
def __init__(
self, enum: EnumType, old_value: EnumValue, new_value: EnumValue
):
self.enum = enum
self.old_value = old_value
self.new_value = new_value
class EnumValueDeprecationRemoved(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"{self.old_value.name} from enum {self.enum.name} "
"is no longer deprecated."
)
def __init__(
self, enum: EnumType, old_value: EnumValue, new_value: EnumValue
):
self.enum = enum
self.old_value = old_value
self.new_value = new_value
class EnumValueDeprecationReasonChanged(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"{self.old_value.name} from enum {self.enum.name} "
"has changed deprecation reason."
)
def __init__(
self, enum: EnumType, old_value: EnumValue, new_value: EnumValue
):
self.enum = enum
self.old_value = old_value
self.new_value = new_value
class DirectiveRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = "Directive {self.directive.name} was removed."
def __init__(self, directive: Directive):
self.directive = directive
class DirectiveAdded(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = "Directive {self.directive.name} was added."
def __init__(self, directive: Directive):
self.directive = directive
class DirectiveLocationRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Location {self.location} was removed from directive "
"{self.directive.name}."
)
def __init__(self, directive: Directive, location: str):
self.directive = directive
self.location = location
class DirectiveLocationAdded(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"Location {self.location} was added to directive "
"{self.directive.name}."
)
def __init__(self, directive: Directive, location: str):
self.directive = directive
self.location = location
class DirectiveArgumentRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Argument {self.argument.name} was removed from directive "
"{self.directive.name}."
)
def __init__(self, directive: Directive, argument: Argument):
self.directive = directive
self.argument = argument
class DirectiveArgumentAdded(SchemaChange):
format_str = (
"{self.required_str} argument {self.argument.name} was added to "
"directive {self.directive.name}."
)
def __init__(self, directive: Directive, argument: Argument):
self.directive = directive
self.argument = argument
self.required_str = "Required" if argument.required else "Optional"
self.severity = (
SchemaChangeSeverity.BREAKING
if argument.required
else SchemaChangeSeverity.COMPATIBLE
)
class DirectiveArgumentDefaultValueChange(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = (
"Argument {self.old_argument.name} of directive "
"{self.directive.name} has changed default value."
)
def __init__(
self,
directive: Directive,
old_argument: Argument,
new_argument: Argument,
):
self.directive = directive
self.old_argument = old_argument
self.new_argument = new_argument
class DirectiveArgumentChangedType(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Argument {self.old_argument.name} of directive {self.directive.name} "
"has changed type from {self.old_argument.type} to {self.new_argument.type}."
)
def __init__(
self,
directive: Directive,
old_argument: Argument,
new_argument: Argument,
):
self.directive = directive
self.old_argument = old_argument
self.new_argument = new_argument
class FieldArgumentRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Argument {self.argument.name} was removed from field "
"{self.field.name} of {self.context_str} {self.type.name}."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
field: Field,
argument: Argument,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
self.argument = argument
class FieldArgumentAdded(SchemaChange):
format_str = (
"{self.required_str} argument {self.argument.name} was added to field "
"{self.field.name} of {self.context_str} {self.type.name}."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
field: Field,
argument: Argument,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
self.argument = argument
self.required_str = "Required" if argument.required else "Optional"
self.severity = (
SchemaChangeSeverity.BREAKING
if argument.required
else SchemaChangeSeverity.COMPATIBLE
)
class FieldArgumentDefaultValueChange(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = (
"Argument {self.old_argument.name} of field "
"{self.field.name} of {self.context_str} {self.type.name} "
"has changed default value."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
field: Field,
old_argument: Argument,
new_argument: Argument,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
self.old_argument = old_argument
self.new_argument = new_argument
class FieldArgumentChangedType(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Argument {self.old_argument.name} of field "
"{self.field.name} of {self.context_str} {self.type.name} "
"has changed type from {self.old_argument.type} "
"to {self.new_argument.type}."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
field: Field,
old_argument: Argument,
new_argument: Argument,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
self.old_argument = old_argument
self.new_argument = new_argument
class FieldChangedType(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Field {self.old_field.name} of {self.context_str} {self.type.name} has "
"changed type from {self.old_field.type} to {self.new_field.type}."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
old_field: Field,
new_field: Field,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.old_field = old_field
self.new_field = new_field
class FieldRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Field {self.field.name} was removed "
"from {self.context_str} {self.type.name}."
)
def __init__(
self, parent_type: Union[ObjectType, InterfaceType], field: Field
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
class FieldAdded(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"Field {self.field.name} was added to "
"{self.context_str} {self.type.name}."
)
def __init__(
self, parent_type: Union[ObjectType, InterfaceType], field: Field
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.field = field
class FieldDeprecated(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"Field {self.old_field.name} of {self.context_str} {self.type.name} "
"was deprecated."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
old_field: Field,
new_field: Field,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.old_field = old_field
self.new_field = new_field
class FieldDeprecationRemoved(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"Field {self.old_field.name} of {self.context_str} {self.type.name} "
"is no longer deprecated."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
old_field: Field,
new_field: Field,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.old_field = old_field
self.new_field = new_field
class FieldDeprecationReasonChanged(SchemaChange):
severity = SchemaChangeSeverity.COMPATIBLE
format_str = (
"Field {self.old_field.name} of {self.context_str} {self.type.name} has "
"changed deprecation reason."
)
def __init__(
self,
parent_type: Union[ObjectType, InterfaceType],
old_field: Field,
new_field: Field,
):
self.type = parent_type
self.context_str = (
"type" if isinstance(parent_type, ObjectType) else "interface"
)
self.old_field = old_field
self.new_field = new_field
class InputFieldRemoved(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Input field {self.field.name} was removed from {self.type.name}."
)
def __init__(self, input_type: InputObjectType, field: InputField):
self.type = input_type
self.field = field
class InputFieldAdded(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"{self.required_str} input field {self.field.name} was added "
"to {self.type.name}."
)
def __init__(self, input_type: InputObjectType, field: InputField):
self.type = input_type
self.field = field
self.required_str = "Required" if field.required else "Optional"
self.severity = (
SchemaChangeSeverity.BREAKING
if field.required
else SchemaChangeSeverity.COMPATIBLE
)
class InputFieldDefaultValueChange(SchemaChange):
severity = SchemaChangeSeverity.DANGEROUS
format_str = (
"Input field {self.old_field.name} of {self.type.name} "
"has changed default value."
)
def __init__(
self,
input_type: InputObjectType,
old_field: InputField,
new_field: InputField,
):
self.type = input_type
self.old_field = old_field
self.new_field = new_field
class InputFieldChangedType(SchemaChange):
severity = SchemaChangeSeverity.BREAKING
format_str = (
"Input field {self.old_field.name} of {self.type.name} "
"has changed type from {self.old_field.type} to {self.new_field.type}."
)
def __init__(
self,
input_type: InputObjectType,
old_field: InputField,
new_field: InputField,
):
self.type = input_type
self.old_field = old_field
self.new_field = new_field

avg_line_length: 28.516696
max_line_length: 85
alphanum_fraction: 0.650684
qsc_code_num_words_quality_signal: 1,704
qsc_code_num_chars_quality_signal: 16,226
qsc_code_mean_word_length_quality_signal: 5.968897
qsc_code_frac_words_unique_quality_signal: 0.086268
qsc_code_frac_chars_top_2grams_quality_signal: 0.036968
qsc_code_frac_chars_top_3grams_quality_signal: 0.036771
qsc_code_frac_chars_top_4grams_quality_signal: 0.025071
qsc_code_frac_chars_dupe_5grams_quality_signal: 0.839052
qsc_code_frac_chars_dupe_6grams_quality_signal: 0.829614
qsc_code_frac_chars_dupe_7grams_quality_signal: 0.779766
qsc_code_frac_chars_dupe_8grams_quality_signal: 0.747911
qsc_code_frac_chars_dupe_9grams_quality_signal: 0.707305
qsc_code_frac_chars_dupe_10grams_quality_signal: 0.650477
qsc_code_frac_chars_replacement_symbols_quality_signal: 0
qsc_code_frac_chars_digital_quality_signal: 0.000499
qsc_code_frac_chars_whitespace_quality_signal: 0.258536
qsc_code_size_file_byte_quality_signal: 16,226
qsc_code_num_lines_quality_signal: 568
qsc_code_num_chars_line_max_quality_signal: 86
qsc_code_num_chars_line_mean_quality_signal: 28.566901
qsc_code_frac_chars_alphabet_quality_signal: 0.844901
qsc_code_frac_chars_comments_quality_signal: 0.022741
qsc_code_cate_xml_start_quality_signal: 0
qsc_code_frac_lines_dupe_lines_quality_signal: 0.67191
qsc_code_cate_autogen_quality_signal: 0
qsc_code_frac_lines_long_string_quality_signal: 0.008989
qsc_code_frac_chars_string_length_quality_signal: 0.187962
qsc_code_frac_chars_long_word_length_quality_signal: 0.043264
qsc_code_frac_lines_string_concat_quality_signal: 0
qsc_code_cate_encoded_data_quality_signal: 0
qsc_code_frac_chars_hex_words_quality_signal: 0
qsc_code_frac_lines_prompt_comments_quality_signal: 0.001761
qsc_code_frac_lines_assert_quality_signal: 0
qsc_codepython_cate_ast_quality_signal: 1
qsc_codepython_frac_lines_func_ratio_quality_signal: 0.080899
qsc_codepython_cate_var_zero_quality_signal: false
qsc_codepython_frac_lines_pass_quality_signal: 0
qsc_codepython_frac_lines_import_quality_signal: 0.006742
qsc_codepython_frac_lines_simplefunc_quality_signal: 0.004494
qsc_codepython_score_lines_no_logic_quality_signal: 0.332584
qsc_codepython_frac_lines_print_quality_signal: 0
qsc_code_num_words: 0
qsc_code_num_chars: 0
qsc_code_mean_word_length: 0
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: 0
qsc_code_frac_chars_top_3grams: 0
qsc_code_frac_chars_top_4grams: 0
qsc_code_frac_chars_dupe_5grams: 1
qsc_code_frac_chars_dupe_6grams: 1
qsc_code_frac_chars_dupe_7grams: 1
qsc_code_frac_chars_dupe_8grams: 1
qsc_code_frac_chars_dupe_9grams: 1
qsc_code_frac_chars_dupe_10grams: 1
qsc_code_frac_chars_replacement_symbols: 0
qsc_code_frac_chars_digital: 0
qsc_code_frac_chars_whitespace: 0
qsc_code_size_file_byte: 0
qsc_code_num_lines: 0
qsc_code_num_chars_line_max: 0
qsc_code_num_chars_line_mean: 0
qsc_code_frac_chars_alphabet: 0
qsc_code_frac_chars_comments: 0
qsc_code_cate_xml_start: 0
qsc_code_frac_lines_dupe_lines: 0
qsc_code_cate_autogen: 0
qsc_code_frac_lines_long_string: 0
qsc_code_frac_chars_string_length: 0
qsc_code_frac_chars_long_word_length: 0
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: 0
qsc_code_frac_chars_hex_words: 0
qsc_code_frac_lines_prompt_comments: 0
qsc_code_frac_lines_assert: 0
qsc_codepython_cate_ast: 0
qsc_codepython_frac_lines_func_ratio: 0
qsc_codepython_cate_var_zero: 0
qsc_codepython_frac_lines_pass: 0
qsc_codepython_frac_lines_import: 0
qsc_codepython_frac_lines_simplefunc: 0
qsc_codepython_score_lines_no_logic: 0
qsc_codepython_frac_lines_print: 0
effective: 0
hits: 6
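The high qsc_code_frac_chars_dupe_*grams_quality_signal values for this row are consistent with the repeated class boilerplate in the file above. The sketch below shows one plausible way such a duplicate n-gram fraction can be computed; the exact tokenization and weighting used for these columns is an assumption, not documented in the source.

```python
from collections import Counter

def frac_chars_dupe_ngrams(content: str, n: int = 5) -> float:
    """Approximate fraction of word-n-gram character mass in n-grams seen more than once."""
    # Whitespace tokenization; the real signal may tokenize differently.
    words = content.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    def mass(gram):  # character mass of one n-gram
        return sum(len(w) for w in gram)
    dupe = sum(mass(g) for g in ngrams if counts[g] > 1)
    total = sum(mass(g) for g in ngrams)
    return dupe / total if total else 0.0
```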
Row 4:

hexsha: a3b84fc8b07fa5ef96971020d8248771dca7bbc9
size: 29,646
ext: py
lang: Python
max_stars_repo_path: appiumbase/fixtures/page_actions.py
max_stars_repo_name: Pawan7593/AppiumBase
max_stars_repo_head_hexsha: 6e767619702d1be0404a89492544323df7dc540a
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: appiumbase/fixtures/page_actions.py
max_issues_repo_name: Pawan7593/AppiumBase
max_issues_repo_head_hexsha: 6e767619702d1be0404a89492544323df7dc540a
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: appiumbase/fixtures/page_actions.py
max_forks_repo_name: Pawan7593/AppiumBase
max_forks_repo_head_hexsha: 6e767619702d1be0404a89492544323df7dc540a
max_forks_repo_licenses: ["MIT"]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2021-12-20T04:31:26.000Z
max_forks_repo_forks_event_max_datetime: 2021-12-20T04:31:26.000Z
content:
"""
This module contains a set of methods that can be used for page loads and
for waiting for elements to appear on a page.
These methods improve on and expand existing WebDriver commands.
Improvements include making WebDriver commands more robust and more reliable
by giving page elements enough time to load before taking action on them.
The default option for searching for elements is by Accessibility ID.
This can be changed by overriding the "MobileBy" parameter.
Options are:
MobileBy.CSS_SELECTOR
MobileBy.CLASS_NAME
MobileBy.ID
MobileBy.NAME
MobileBy.LINK_TEXT
MobileBy.XPATH
MobileBy.TAG_NAME
MobileBy.PARTIAL_LINK_TEXT
"""
import codecs
import os
import sys
import time
from selenium.common.exceptions import ElementNotInteractableException
from selenium.common.exceptions import ElementNotVisibleException
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import NoSuchAttributeException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import StaleElementReferenceException
from appium.webdriver.common.mobileby import MobileBy
from selenium.webdriver.common.action_chains import ActionChains
from appiumbase.config import settings
from appiumbase.fixtures import shared_utils as s_utils
def is_element_present(driver, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Returns whether the specified element selector is present on the page.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
@Returns
Boolean (is element present)
"""
try:
driver.find_element(by=by, value=selector)
return True
except Exception:
return False
def is_element_visible(driver, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Returns whether the specified element selector is visible on the page.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
@Returns
Boolean (is element visible)
"""
try:
element = driver.find_element(by=by, value=selector)
return element.is_displayed()
except Exception:
return False
def is_element_enabled(driver, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Returns whether the specified element selector is enabled on the page.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
@Returns
Boolean (is element enabled)
"""
try:
element = driver.find_element(by=by, value=selector)
return element.is_enabled()
except Exception:
return False
def is_text_visible(driver, text, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Returns whether the specified text is visible in the specified selector.
@Params
driver - the webdriver object (required)
text - the text string to search for
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
@Returns
Boolean (is text visible)
"""
try:
element = driver.find_element(by=by, value=selector)
return element.is_displayed() and text in element.text
except Exception:
return False
def is_attribute_present(
driver, selector, attribute, value=None, by=MobileBy.ACCESSIBILITY_ID
):
"""
Returns whether the specified attribute is present in the given selector.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
attribute - the attribute that is expected for the element (required)
value - the attribute value that is expected (Default: None)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
@Returns
Boolean (is attribute present)
"""
try:
element = driver.find_element(by=by, value=selector)
found_value = element.get_attribute(attribute)
if found_value is None:
raise Exception()
if value is not None:
if found_value == value:
return True
else:
raise Exception()
else:
return True
except Exception:
return False
def hover_on_element(driver, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Fires the hover event for the specified element by the given selector.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
"""
element = driver.find_element(by=by, value=selector)
hover = ActionChains(driver).move_to_element(element)
hover.perform()
def hover_element(driver, element):
"""
Similar to hover_on_element(), but uses found element, not a selector.
"""
hover = ActionChains(driver).move_to_element(element)
hover.perform()
def timeout_exception(exception, message):
exception, message = s_utils.format_exc(exception, message)
raise exception(message)
def hover_and_click(
driver,
hover_selector,
click_selector,
hover_by=MobileBy.ACCESSIBILITY_ID,
click_by=MobileBy.ACCESSIBILITY_ID,
timeout=settings.SMALL_TIMEOUT,
):
"""
Fires the hover event for a specified element by a given selector, then
clicks on another element specified. Useful for dropdown hover based menus.
@Params
driver - the webdriver object (required)
hover_selector - the css selector to hover over (required)
click_selector - the css selector to click on (required)
hover_by - the hover selector type to search by (Default: MobileBy.CSS_SELECTOR)
click_by - the click selector type to search by (Default: MobileBy.CSS_SELECTOR)
timeout - number of seconds to wait for click element to appear after hover
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
element = driver.find_element(by=hover_by, value=hover_selector)
hover = ActionChains(driver).move_to_element(element)
for x in range(int(timeout * 10)):
try:
hover.perform()
element = driver.find_element(by=click_by, value=click_selector)
element.click()
return element
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
message = "Element {%s} was not present after %s second%s!" % (
click_selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
def hover_element_and_click(
driver,
element,
click_selector,
click_by=MobileBy.ACCESSIBILITY_ID,
timeout=settings.SMALL_TIMEOUT,
):
"""
Similar to hover_and_click(), but assumes top element is already found.
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
hover = ActionChains(driver).move_to_element(element)
for x in range(int(timeout * 10)):
try:
hover.perform()
element = driver.find_element(by=click_by, value=click_selector)
element.click()
return element
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
message = "Element {%s} was not present after %s second%s!" % (
click_selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
def hover_element_and_double_click(
driver,
element,
click_selector,
click_by=MobileBy.ACCESSIBILITY_ID,
timeout=settings.SMALL_TIMEOUT,
):
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
hover = ActionChains(driver).move_to_element(element)
for x in range(int(timeout * 10)):
try:
hover.perform()
element_2 = driver.find_element(by=click_by, value=click_selector)
actions = ActionChains(driver)
actions.move_to_element(element_2)
actions.double_click(element_2)
actions.perform()
return element_2
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
message = "Element {%s} was not present after %s second%s!" % (
click_selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
def wait_for_element_present(driver, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT):
element = None
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
#s_utils.check_if_time_limit_exceeded()
try:
element = driver.find_element(by=by, value=selector)
return element
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
if not element:
message = "Element {%s} was not present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
def wait_for_element_visible(
driver, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element by the given selector. Returns the
element object if the element is present and visible on the page.
Raises NoSuchElementException if the element does not exist
within the specified timeout.
Raises ElementNotVisibleException if the element exists,
but is not visible (eg. opacity is "0") within the specified timeout.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
timeout - the time to wait for elements in seconds
@Returns
A web element object
"""
element = None
is_present = False
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
try:
element = driver.find_element(by=by, value=selector)
is_present = True
if element.is_displayed():
return element
else:
element = None
raise Exception()
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
if not element and by != MobileBy.LINK_TEXT:
if not is_present:
# The element does not exist in the HTML
message = "Element {%s} was not present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
# The element exists in the HTML, but is not visible
message = "Element {%s} was not visible after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(ElementNotVisibleException, message)
if not element and by == MobileBy.LINK_TEXT:
message = "Link text {%s} was not visible after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(ElementNotVisibleException, message)
def wait_for_text_visible(
driver, text, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element by the given selector. Returns the
element object if the text is present in the element and visible
on the page.
Raises NoSuchElementException if the element does not exist in the HTML
within the specified timeout.
Raises ElementNotVisibleException if the element exists in the HTML,
but the text is not visible within the specified timeout.
@Params
driver - the webdriver object (required)
text - the text that is being searched for in the element (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.CSS_SELECTOR)
timeout - the time to wait for elements in seconds
@Returns
A web element object that contains the text searched for
"""
element = None
is_present = False
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
element = driver.find_element(by=by, value=selector)
is_present = True
if element.is_displayed() and text in element.text:
return element
else:
element = None
raise Exception()
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
if not element:
if not is_present:
# The element does not exist in the HTML
message = "Element {%s} was not present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
# The element exists in the HTML, but the text is not visible
message = (
"Expected text {%s} for {%s} was not visible after %s second%s!"
% (text, selector, timeout, plural)
)
timeout_exception(ElementNotVisibleException, message)
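# Example usage (sketch; "driver" and the "status_label" accessibility id
# are placeholders for illustration):
#
#     wait_for_text_visible(driver, "Success", "status_label", timeout=5)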
def wait_for_exact_text_visible(
driver, text, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element by the given selector. Returns the
element object if the text matches exactly with the text in the element,
and the text is visible.
Raises NoSuchElementException if the element does not exist in the HTML
within the specified timeout.
Raises ElementNotVisibleException if the element exists in the HTML,
but the exact text is not visible within the specified timeout.
@Params
driver - the webdriver object (required)
text - the exact text that is expected for the element (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for elements in seconds
@Returns
A web element object that contains the text searched for
"""
element = None
is_present = False
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
element = driver.find_element(by=by, value=selector)
is_present = True
if element.is_displayed() and text.strip() == element.text.strip():
return element
else:
element = None
raise Exception()
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
if not element:
if not is_present:
# The element does not exist in the HTML
message = "Element {%s} was not present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
# The element exists in the HTML, but the exact text is not visible
message = (
"Expected exact text {%s} for {%s} was not visible "
"after %s second%s!" % (text, selector, timeout, plural)
)
timeout_exception(ElementNotVisibleException, message)
def wait_for_attribute(
driver,
selector,
attribute,
value=None,
by=MobileBy.ACCESSIBILITY_ID,
timeout=settings.LARGE_TIMEOUT,
):
"""
Searches for the specified element attribute by the given selector.
Returns the element object if the expected attribute is present
and the expected attribute value is present (if specified).
Raises NoSuchElementException if the element does not exist in the HTML
within the specified timeout.
Raises NoSuchAttributeException if the element exists in the HTML,
but the expected attribute/value is not present within the timeout.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
attribute - the attribute that is expected for the element (required)
value - the attribute value that is expected (Default: None)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for the element attribute in seconds
@Returns
A web element object that contains the expected attribute/value
"""
element = None
element_present = False
attribute_present = False
found_value = None
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
element = driver.find_element(by=by, value=selector)
element_present = True
attribute_present = False
found_value = element.get_attribute(attribute)
if found_value is not None:
attribute_present = True
else:
element = None
raise Exception()
if value is not None:
if found_value == value:
return element
else:
element = None
raise Exception()
else:
return element
except Exception:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
if not element:
if not element_present:
# The element does not exist in the HTML
message = "Element {%s} was not present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(NoSuchElementException, message)
if not attribute_present:
# The element does not have the attribute
message = (
"Expected attribute {%s} of element {%s} was not present "
"after %s second%s!" % (attribute, selector, timeout, plural)
)
timeout_exception(NoSuchAttributeException, message)
# The element attribute exists, but the expected value does not match
message = (
"Expected value {%s} for attribute {%s} of element {%s} was not "
"present after %s second%s! (The actual value was {%s})"
% (value, attribute, selector, timeout, plural, found_value)
)
timeout_exception(NoSuchAttributeException, message)
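# Example usage (sketch; the selector, attribute name, and expected value
# below are hypothetical):
#
#     wait_for_attribute(
#         driver, "submit_button", "enabled", value="true", timeout=10
#     )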
def wait_for_element_absent(
driver, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element by the given selector.
Raises an exception if the element is still present after the
specified timeout.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for elements in seconds
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
driver.find_element(by=by, value=selector)
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
except Exception:
return True
plural = "s"
if timeout == 1:
plural = ""
message = "Element {%s} was still present after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(Exception, message)
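# Example usage (sketch; assumes the app under test exposes a
# "loading_spinner" accessibility id that eventually disappears):
#
#     wait_for_element_absent(driver, "loading_spinner", timeout=15)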
def wait_for_element_not_visible(
driver, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element by the given selector.
Raises an exception if the element is still visible after the
specified timeout.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for the element in seconds
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
element = driver.find_element(by=by, value=selector)
if element.is_displayed():
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
else:
return True
except Exception:
return True
plural = "s"
if timeout == 1:
plural = ""
message = "Element {%s} was still visible after %s second%s!" % (
selector,
timeout,
plural,
)
timeout_exception(Exception, message)
def wait_for_text_not_visible(
driver, text, selector, by=MobileBy.ACCESSIBILITY_ID, timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the text in the element of the given selector on the page.
Returns True if the text is not visible on the page within the timeout.
Raises an exception if the text is still present after the timeout.
@Params
driver - the webdriver object (required)
text - the text that is being searched for in the element (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for elements in seconds
@Returns
True if the text is not visible within the specified timeout
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
if not is_text_visible(driver, text, selector, by=by):
return True
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
message = "Text {%s} in {%s} was still visible after %s second%s!" % (
text,
selector,
timeout,
plural,
)
timeout_exception(Exception, message)
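# Example usage (sketch; the text and selector are placeholders):
#
#     wait_for_text_not_visible(driver, "Loading...", "status_label", timeout=10)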
def wait_for_attribute_not_present(
driver,
selector,
attribute,
value=None,
by=MobileBy.ACCESSIBILITY_ID,
timeout=settings.LARGE_TIMEOUT
):
"""
Searches for the specified element attribute by the given selector.
Returns True if the attribute isn't present on the page within the timeout.
Also returns True if the element is not present within the timeout.
Raises an exception if the attribute is still present after the timeout.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
attribute - the element attribute (required)
value - the attribute value (Default: None)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
timeout - the time to wait for the element attribute in seconds
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
if not is_attribute_present(
driver, selector, attribute, value=value, by=by
):
return True
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
plural = "s"
if timeout == 1:
plural = ""
message = (
"Attribute {%s} of element {%s} was still present after %s second%s!"
"" % (attribute, selector, timeout, plural)
)
if value:
message = (
"Value {%s} for attribute {%s} of element {%s} was still present "
"after %s second%s!"
"" % (value, attribute, selector, timeout, plural)
)
timeout_exception(Exception, message)
def find_visible_elements(driver, selector, by=MobileBy.ACCESSIBILITY_ID):
"""
Finds all WebElements that match a selector and are visible.
Similar to webdriver.find_elements.
@Params
driver - the webdriver object (required)
selector - the locator for identifying the page element (required)
by - the type of selector being used (Default: MobileBy.ACCESSIBILITY_ID)
"""
elements = driver.find_elements(by=by, value=selector)
try:
v_elems = [element for element in elements if element.is_displayed()]
return v_elems
except (StaleElementReferenceException, ElementNotInteractableException):
time.sleep(0.1)
elements = driver.find_elements(by=by, value=selector)
v_elems = []
for element in elements:
if element.is_displayed():
v_elems.append(element)
return v_elems
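# Example usage (sketch; "menu_item" is a hypothetical accessibility id):
#
#     for item in find_visible_elements(driver, "menu_item"):
#         print(item.text)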
def save_screenshot(driver, name, folder=None):
"""
Saves a screenshot to the current directory (or to a subfolder if provided).
If the provided folder doesn't exist, it will be created.
The screenshot will be in PNG format.
"""
if not name.endswith(".png"):
name = name + ".png"
if folder:
abs_path = os.path.abspath(".")
file_path = abs_path + "/%s" % folder
if not os.path.exists(file_path):
os.makedirs(file_path)
screenshot_path = "%s/%s" % (file_path, name)
else:
screenshot_path = name
try:
element = driver.find_element(by=MobileBy.TAG_NAME, value="body")
element_png = element.screenshot_as_png
with open(screenshot_path, "wb") as file:
file.write(element_png)
except Exception:
if driver:
driver.get_screenshot_as_file(screenshot_path)
else:
pass
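# Example usage (sketch): saves ./screenshots/after_login.png, creating the
# "screenshots" folder if it does not already exist.
#
#     save_screenshot(driver, "after_login", folder="screenshots")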
def wait_for_and_accept_alert(driver, timeout=settings.LARGE_TIMEOUT):
"""
Wait for and accept an alert. Returns the text from the alert.
@Params
driver - the webdriver object (required)
timeout - the time to wait for the alert in seconds
"""
alert = wait_for_and_switch_to_alert(driver, timeout)
alert_text = alert.text
alert.accept()
return alert_text
def wait_for_and_dismiss_alert(driver, timeout=settings.LARGE_TIMEOUT):
"""
Wait for and dismiss an alert. Returns the text from the alert.
@Params
driver - the webdriver object (required)
timeout - the time to wait for the alert in seconds
"""
alert = wait_for_and_switch_to_alert(driver, timeout)
alert_text = alert.text
alert.dismiss()
return alert_text
def wait_for_and_switch_to_alert(driver, timeout=settings.LARGE_TIMEOUT):
"""
Wait for a browser alert to appear, and switch to it. This should be usable
as a drop-in replacement for driver.switch_to.alert when the alert box
may not exist yet.
@Params
driver - the webdriver object (required)
timeout - the time to wait for the alert in seconds
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
s_utils.check_if_time_limit_exceeded()
try:
alert = driver.switch_to.alert
# Raises exception if no alert present
dummy_variable = alert.text # noqa
return alert
except NoAlertPresentException:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
message = "Alert was not present after %s seconds!" % timeout
timeout_exception(Exception, message)
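# Example usage (sketch; only meaningful when the driver context can raise
# alert dialogs):
#
#     alert_text = wait_for_and_accept_alert(driver, timeout=5)
#     print("Alert said:", alert_text)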
src/azure-cli/azure/cli/command_modules/apim/_params.py | psignoret/azure-cli | 1a4a043750315f9a7f2894b4287126089978b615 | MIT
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from azure.cli.core.commands.parameters import (get_enum_type,
get_location_type,
resource_group_name_type,
get_three_state_flag)
from azure.mgmt.apimanagement.models import (SkuType, VirtualNetworkType)
SKU_TYPES = SkuType
VNET_TYPES = VirtualNetworkType
def load_arguments(self, _):
from azure.cli.core.commands.parameters import tags_type
from azure.cli.core.commands.validators import get_default_location_from_resource_group
with self.argument_context('apim') as c:
c.argument('resource_group_name', arg_type=resource_group_name_type)
c.argument('tags', tags_type)
c.argument('service_name', options_list=['--name', '-n'], help="The name of the API Management service instance", id_part=None)
c.argument('name', options_list=['--name', '-n'], help="The name of the API Management service instance", id_part=None)
c.argument('location', validator=get_default_location_from_resource_group)
with self.argument_context('apim create') as c:
c.argument('location', arg_type=get_location_type(self.cli_ctx), validator=get_default_location_from_resource_group)
c.argument('publisher_name', help='The name of your organization for use in the developer portal and e-mail notifications.', required=True)
c.argument('publisher_email', help='The e-mail address to receive all system notifications.')
c.argument('enable_client_certificate', arg_type=get_three_state_flag(), help='Enforces a client certificate to be presented on each request to the gateway and also enables the ability to authenticate the certificate in the policy on the gateway.')
c.argument('virtual_network_type', get_enum_type(VNET_TYPES), options_list=['--virtual-network', '-v'], help='The virtual network type.')
c.argument('sku_name', arg_type=get_enum_type(SKU_TYPES), help='The SKU of the API Management instance')
c.argument('sku_capacity', type=int, help='The number of deployed units of the SKU.')
c.argument('enable_managed_identity', arg_type=get_three_state_flag(), help='Create a managed identity for the API Management service to access other Azure resources.')
with self.argument_context('apim update') as c:
c.argument('publisher_name', help='The name of your organization for use in the developer portal and e-mail notifications.')
c.argument('publisher_email', help='The e-mail address to receive all system notifications.')
c.argument('enable_client_certificate', arg_type=get_three_state_flag(), help='Enforces a client certificate to be presented on each request to the gateway and also enables the ability to authenticate the certificate in the policy on the gateway.')
c.argument('virtual_network_type', get_enum_type(VNET_TYPES), options_list=['--virtual-network', '-v'], help='The virtual network type.')
c.argument('sku_name', arg_type=get_enum_type(SKU_TYPES), help='The SKU of the API Management instance')
c.argument('sku_capacity', type=int, help='The number of deployed units of the SKU.')
c.argument('enable_managed_identity', arg_type=get_three_state_flag(), help='Create a managed identity for the API Management service to access other Azure resources.')
with self.argument_context('apim backup') as c:
c.argument('backup_name', help='The name of the backup file to create.')
c.argument('storage_account_name', arg_group='Storage', help='The name of the storage account used to place the backup.')
c.argument('storage_account_key', arg_group='Storage', help='The access key of the storage account used to place the backup.')
c.argument('storage_account_container', arg_group='Storage', help='The name of the storage account container used to place the backup.')
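# Example invocations backed by the arguments above (a sketch; flag names
# follow the usual snake_case -> --kebab-case convention, so confirm them
# against `az apim create --help` for your CLI version):
#
#   az apim create -g MyResourceGroup -n MyApim \
#       --publisher-name Contoso --publisher-email admin@contoso.com \
#       --sku-name Developer --sku-capacity 1
#   az apim update -g MyResourceGroup -n MyApim --sku-capacity 2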
celloracle/visualizations/development_module_visualization.py | gordian-biotechnology/CellOracle | b160622711d26d3c283822b98560709ee8b3063b | Apache-2.0
# -*- coding: utf-8 -*-
import os, sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from .config import CONFIG
def plot_cluster_whole(self, ax=None, s=CONFIG["s_scatter"], args=CONFIG["default_args"]):
if ax is None:
ax = plt
ax.scatter(self.embedding[:, 0], self.embedding[:, 1], c=self.colorandum, s=s, **args)
ax.axis("off")
def plot_cluster_cells_use(self, ax=None, s=CONFIG["s_scatter"], color=None, show_background=True, args=CONFIG["default_args"]):
if ax is None:
ax = plt
if s == 0:
color = "white"
if show_background:
plot_background(self=self, ax=ax, s=s, args=args)
if not hasattr(self, "cell_idx_use"):
self.cell_idx_use = None
if self.cell_idx_use is None:
if color is None:
ax.scatter(self.embedding[:, 0], self.embedding[:, 1], c=self.colorandum, s=s, **args)
else:
ax.scatter(self.embedding[:, 0], self.embedding[:, 1], c=color, s=s, **args)
else:
if color is None:
ax.scatter(self.embedding[self.cell_idx_use, 0], self.embedding[self.cell_idx_use, 1],
c=self.colorandum[self.cell_idx_use, :],s=s, **args)
else:
ax.scatter(self.embedding[self.cell_idx_use, 0], self.embedding[self.cell_idx_use, 1],
c=color, s=s, **args)
ax.axis("off")
def plot_background(self, ax=None, s=CONFIG["s_scatter"], args=CONFIG["default_args"]):
if ax is None:
ax = plt
ax.scatter(self.embedding[:, 0], self.embedding[:, 1], c="lightgray", s=s, **args)
#ax.set_title("Pseudotime")
ax.axis("off")
def plot_pseudotime(self, ax=None, s=CONFIG["s_scatter"], show_background=True, cmap="rainbow", args=CONFIG["default_args"]):
if ax is None:
ax = plt
if show_background:
plot_background(self=self, ax=ax, s=s, args=args)
if self.cell_idx_use is None:
ax.scatter(self.embedding[:, 0], self.embedding[:, 1], c=self.pseudotime, cmap=cmap, s=s, **args)
else:
ax.scatter(self.embedding[self.cell_idx_use, 0], self.embedding[self.cell_idx_use, 1],
c=self.pseudotime[self.cell_idx_use], cmap=cmap, s=s, **args)
ax.axis("off")
def plot_background_on_grid(self, ax=None, s=CONFIG["s_grid"], args={}):
if ax is None:
ax = plt
if hasattr(self, "mass_filter_whole_reference"):
mass_filter = self.mass_filter_whole_reference
elif hasattr(self, "mass_filter_whole"):
mass_filter = self.mass_filter_whole
ax.scatter(self.gridpoints_coordinates[:, 0],
self.gridpoints_coordinates[:, 1], s=0)
if "c" not in args.keys():
ax.scatter(self.gridpoints_coordinates[~mass_filter, 0],
self.gridpoints_coordinates[~mass_filter, 1],
c="lightgray", s=s, **args)
else:
ax.scatter(self.gridpoints_coordinates[~mass_filter, 0],
self.gridpoints_coordinates[~mass_filter, 1],
s=s, **args)
ax.axis("off")
def plot_pseudotime_on_grid(self, ax=None, s=CONFIG["s_grid"], show_background=True, args={}):
if ax is None:
ax = plt
if hasattr(self, "mass_filter_simulation"):
mass_filter = self.mass_filter_simulation
elif hasattr(self, "mass_filter"):
mass_filter = self.mass_filter
if show_background:
plot_background_on_grid(self=self, ax=ax, s=s, args=args)
else:
plot_cluster_cells_use(self=self, ax=ax, s=0, color="white", show_background=False, args={})
ax.scatter(self.gridpoints_coordinates[~mass_filter, 0],
self.gridpoints_coordinates[~mass_filter, 1],
c=self.pseudotime_on_grid[~mass_filter],
cmap="rainbow", s=s, **args)
ax.axis("off")
def plot_reference_flow_on_grid(self, ax=None, scale=CONFIG["scale_dev"], show_background=True, s=CONFIG["s_scatter"], args=CONFIG["default_args_quiver"]):
if ax is None:
ax = plt
if hasattr(self, "mass_filter_simulation"):
mass_filter = self.mass_filter_simulation
elif hasattr(self, "mass_filter"):
mass_filter = self.mass_filter
if show_background:
plot_background(self=self, ax=ax, s=s, args=CONFIG["default_args"])
else:
plot_cluster_cells_use(self=self, ax=ax, s=0, color="white", show_background=False, args={})
ax.quiver(self.gridpoints_coordinates[~mass_filter, 0],
self.gridpoints_coordinates[~mass_filter, 1],
self.ref_flow[~mass_filter, 0],
self.ref_flow[~mass_filter, 1],
scale=scale, **args)
ax.axis("off")
def plot_simulation_flow_on_grid(self, ax=None, scale=CONFIG["scale_simulation"], show_background=True, s=CONFIG["s_scatter"], args=CONFIG["default_args_quiver"]):
_plot_simulation_flow_on_grid(self=self, ax=ax, scale=scale, show_background=show_background, s=s, data_random=False, args=args)
def plot_simulation_flow_random_on_grid(self, ax=None, scale=CONFIG["scale_simulation"], show_background=True, s=CONFIG["s_scatter"], args=CONFIG["default_args_quiver"]):
_plot_simulation_flow_on_grid(self=self, ax=ax, scale=scale, show_background=show_background, s=s, data_random=True, args=args)
def _plot_simulation_flow_on_grid(self, ax=None, scale=CONFIG["scale_simulation"], show_background=True, s=CONFIG["s_scatter"], data_random=False, args=CONFIG["default_args_quiver"]):
if ax is None:
ax = plt
if show_background:
plot_background(self=self, ax=ax, s=s, args=CONFIG["default_args"])
else:
plot_cluster_cells_use(self=self, ax=ax, s=0, color=None, show_background=False, args={})
# mass filter selection
if hasattr(self, "mass_filter_simulation"):
mass_filter = self.mass_filter_simulation
elif hasattr(self, "mass_filter"):
mass_filter = self.mass_filter
# Gridpoint cordinate selection
if hasattr(self, "gridpoints_coordinates"):
gridpoints_coordinates = self.gridpoints_coordinates
elif hasattr(self, "mass_filter"):
gridpoints_coordinates = self.flow_grid
# Arrow selection
if data_random:
flow = self.flow_rndm
else:
flow = self.flow
ax.quiver(gridpoints_coordinates[~mass_filter, 0],
gridpoints_coordinates[~mass_filter, 1],
flow[~mass_filter, 0],
flow[~mass_filter, 1], #zorder=20000,
scale=scale, **args)
ax.axis("off")
def plot_inner_product_on_grid(self, ax=None, vm=1, s=CONFIG["s_grid"], show_background=True, args={}):
if ax is None:
ax = plt
if show_background:
plot_background_on_grid(self=self, ax=ax, s=s,
args={"facecolor": "None",
"c": "None",
"edgecolors":'black',
"linewidths": 0.05})
else:
plot_cluster_cells_use(self=self, ax=ax, s=0, color=None, show_background=False, args={})
ax.scatter(self.gridpoints_coordinates[~self.mass_filter_simulation, 0],
self.gridpoints_coordinates[~self.mass_filter_simulation, 1],
c=self.inner_product[~self.mass_filter_simulation],
cmap="coolwarm", vmin=-vm, vmax=vm, s=s, **args)
ax.axis("off")
def plot_inner_product_on_pseudotime(self, ax=None, vm=1, s=CONFIG["s_grid"], args={}):
if ax is None:
fig, ax = plt.subplots()
pcm = ax.scatter(self.pseudotime_on_grid[~self.mass_filter_simulation],
self.inner_product[~self.mass_filter_simulation],
c=self.inner_product[~self.mass_filter_simulation],
cmap="coolwarm",
vmin=-vm, vmax=vm, s=s, **args)
ax.set_ylim([-vm*1.1, vm*1.1])
ax.axhline(0, color="lightgray")
pp = plt.colorbar(pcm, ax=ax, orientation="vertical")
sns.despine()
ax.set_xlabel("pseudotime")
ax.set_ylabel("inner product score")
def plot_inner_product_as_box(self, ax=None, vm=1, args={}):
if ax is None:
fig, ax = plt.subplots()
sns.boxplot(data=self.inner_product_df, x="pseudotime_id", y="score", color="white", ax=ax)
ax.set_xlabel("Digitized_pseudotime")
ax.set_ylabel("inner product score")
ax.axhline(0, color="gray")
ax.set_ylim([-vm*1.1, vm*1.1])
ax.tick_params(
labelleft=False)
sns.despine()
def plot_quiver(self, ax=None, scale=CONFIG["scale_simulation"], color=None, s=CONFIG["s_scatter"], show_background=True, args=CONFIG["default_args"]):
_plot_quiver(self=self, ax=ax, scale=scale, color=color, s=s, show_background=show_background, args=args, data_random=False)
def plot_quiver_random(self, ax=None, scale=CONFIG["scale_simulation"], color=None, s=CONFIG["s_scatter"], show_background=True, args=CONFIG["default_args"]):
_plot_quiver(self=self, ax=ax, scale=scale, color=color, s=s, show_background=show_background, args=args, data_random=True)
def _plot_quiver(self, ax=None, scale=CONFIG["scale_simulation"], color=None, s=CONFIG["s_scatter"], show_background=True, args=CONFIG["default_args"], data_random=False):
if ax is None:
ax = plt
if not hasattr(self, "cell_idx_use"):
self.cell_idx_use = None
if self.cell_idx_use is None:
ix_choice = np.arange(self.embedding.shape[0])
else:
ix_choice = self.cell_idx_use
# Plot whole cell with lightgray
if show_background:
ax.scatter(self.embedding[:, 0], self.embedding[:, 1],
c="lightgray", alpha=1, s=s, **args)
ax.scatter(self.embedding[ix_choice, 0], self.embedding[ix_choice, 1],
c="lightgray", alpha=0.2, edgecolor=(0,0,0,1), s=s, **args)
if color is None:
color=self.colorandum[ix_choice]
quiver_kwargs=dict(headaxislength=7, headlength=11, headwidth=8,
linewidths=0.25, width=0.0045,edgecolors="k",
color=color, alpha=1)
if data_random:
quiver = self.delta_embedding_random
else:
quiver = self.delta_embedding
ax.quiver(self.embedding[ix_choice, 0], self.embedding[ix_choice, 1],
quiver[ix_choice, 0],
quiver[ix_choice, 1],
scale=scale, **quiver_kwargs)
ax.axis("off")
def visualize_development_module_layout_2(self, scale_for_pseudotime=CONFIG["scale_dev"],
scale_for_simulation=CONFIG["scale_simulation"], s=CONFIG["s_scatter"], s_grid=CONFIG["s_grid"], vm=1, show_background=True):
if self.name is None:
name = "Selected lineage"
else:
name = self.name
fig, ax = plt.subplots(3, 4, figsize=[20,15])
ax_ = ax[0, 0]
plot_cluster_whole(self, ax=ax_, s=s)
ax_.set_title("Whole population")
##
ax_ = ax[0, 1]
plot_cluster_cells_use(self, ax=ax_, s=s, color="#EC7063", show_background=show_background)
ax_.set_title(f"{name}")
####
ax_ = ax[0, 2]
plot_pseudotime(self, ax=ax_, s=s, show_background=show_background)
ax_.set_title("Pseudotime")
###
ax_ = ax[0, 3]
plot_reference_flow_on_grid(self, ax=ax_, scale=scale_for_pseudotime, show_background=show_background, s=s)
ax_.set_title("Development flow")
####
ax_ = ax[1, 0]
plot_quiver(self, ax=ax_, scale=scale_for_simulation, color="#EC7063", s=s, show_background=show_background)
ax_.set_title(f"Perturb simulation \n color: {name}")
####
ax_ = ax[1, 1]
plot_quiver(self, ax=ax_, scale=scale_for_simulation, color=None, s=s, show_background=show_background)
ax_.set_title("Perturb simulation \n color: cluster")
ax_ = ax[1, 2]
plot_simulation_flow_on_grid(self, ax=ax_, scale=scale_for_simulation, show_background=show_background, s=s)
ax_.set_title("Perturb simulation")
#####
ax_ = ax[1, 3]
plot_cluster_cells_use(self, ax=ax_, s=s, color="#EC7063", show_background=show_background)
plot_simulation_flow_on_grid(self, ax=ax_, scale=scale_for_simulation, show_background=False, s=s)
ax_.set_title("Perturb simulation")
ax_ = ax[2, 0]
plot_inner_product_on_grid(self, ax=ax_, vm=vm,s=s_grid, show_background=show_background)
ax_.set_title("Inner product of \n Perturb simulation * Development flow")
ax_ = ax[2, 1]
plot_inner_product_on_grid(self, ax=ax_, vm=vm,s=s_grid, show_background=show_background)
plot_simulation_flow_on_grid(self, ax=ax_, scale=scale_for_simulation, show_background=False, s=s)
ax_.set_title("Inner product of \n Perturb simulation * Development flow")
ax_ = ax[2, 2]
plot_inner_product_on_pseudotime(self, ax=ax_, vm=vm, s=s_grid)
ax_ = ax[2, 3]
plot_inner_product_as_box(self, ax=ax_, vm=vm)
def visualize_development_module_layout_1(self, scale_for_pseudotime=CONFIG["scale_dev"],
scale_for_simulation=CONFIG["scale_simulation"], s=CONFIG["s_scatter"], s_grid=CONFIG["s_grid"], vm=1, show_background=True):
fig, ax = plt.subplots(2, 4, figsize=[20,10])
ax_ = ax[0, 0]
plot_cluster_cells_use(self, ax=ax_, s=s, show_background=show_background)
##
ax_ = ax[0, 1]
plot_pseudotime(self, ax=ax_, s=s, show_background=show_background)
####
ax_ = ax[0, 2]
plot_pseudotime_on_grid(self, ax=ax_, s=s_grid, show_background=show_background)
ax_.set_title("Pseudotime on grid")
###
ax_ = ax[0, 3]
plot_reference_flow_on_grid(self, ax=ax_, scale=scale_for_pseudotime, show_background=show_background, s=s)
ax_.set_title("Development flow")
####
ax_ = ax[1, 0]
plot_simulation_flow_on_grid(self, ax=ax_, scale=scale_for_simulation, show_background=show_background, s=s)
ax_.set_title("Perturb simulation")
####
ax_ = ax[1, 1]
plot_inner_product_on_grid(self, ax=ax_, vm=vm,s=s_grid, show_background=show_background)
ax_.set_title("Inner product of \n Perturb simulation * Development flow")
ax_ = ax[1, 2]
plot_inner_product_on_pseudotime(self, ax=ax_, vm=vm, s=s_grid)
#####
ax_ = ax[1, 3]
plot_inner_product_as_box(self, ax=ax_, vm=vm)
def visualize_development_module_layout_0(self, scale_for_pseudotime=CONFIG["scale_dev"],
scale_for_simulation=CONFIG["scale_simulation"], s=CONFIG["s_scatter"], s_grid=CONFIG["s_grid"], vm=1, show_background=True):
fig, ax = plt.subplots(2, 3, figsize=[20, 13.5])
ax_ = ax[0, 0]
plot_cluster_cells_use(self, ax=ax_, s=s, show_background=show_background)
ax_.set_title("Cluster")
ax_ = ax[0, 1]
plot_reference_flow_on_grid(self, ax=ax_, scale=scale_for_pseudotime, show_background=show_background, s=s)
ax_.set_title("Development flow")
##
ax_ = ax[0, 2]
plot_simulation_flow_on_grid(self, ax=ax_, scale=scale_for_simulation, show_background=show_background, s=s)
ax_.set_title("Perturb simulation")
####
ax_ = ax[1, 0]
plot_inner_product_on_grid(self, ax=ax_, vm=vm,s=s_grid, show_background=show_background)
ax_.set_title("Inner product of \n Perturb simulation * Development flow")
ax_ = ax[1, 1]
plot_inner_product_on_pseudotime(self, ax=ax_, vm=vm, s=s_grid)
#####
ax_ = ax[1, 2]
plot_inner_product_as_box(self, ax=ax_, vm=vm)
#####
'''
def plot_legend(labels, palette, ax_):
for i, label in enumerate(labels):
ax_.scatter([0], [i], s=100, c=palette[label])
ax_.text(1, i-len(labels)*0.015, s=label)
ax_.set_ylim([-1, len(labels)])
ax_.set_xlim([-1, 10])
ax_.axis("off")
def plot_stackedvar(df, ax, palette=None):
bottom_feats=[]
if palette is None:
for i, j in enumerate(df.index.values):
if i==0:
ax.bar(df.columns.values, df.loc[j].values, edgecolor='white', label=j)
else:
ax.bar(df.columns.values, df.loc[j].values, label=j,
bottom=df.loc[bottom_feats].sum(axis=0).values,
edgecolor='white')
bottom_feats.append(j)
else:
for i, j in enumerate(df.index.values):
if i==0:
ax.bar(df.columns.values, df.loc[j].values,
edgecolor='white', color=palette[j], label=j)
else:
ax.bar(df.columns.values, df.loc[j].values, label=j, color=palette[j],
bottom=df.loc[bottom_feats].sum(axis=0).values,
edgecolor='white')
bottom_feats.append(j)
#plt.legend()
ax.set_xticks(df.columns)
'''
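# Example usage (a minimal sketch; assumes "dev" is an object exposing the
# attributes these helpers read, e.g. embedding, colorandum, pseudotime,
# gridpoints_coordinates, mass_filter_simulation, inner_product, flow, ...):
#
#     fig, ax = plt.subplots(figsize=[6, 6])
#     plot_pseudotime(dev, ax=ax, s=CONFIG["s_scatter"])
#     plt.show()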
jarbas/core/migrations/0002_add_indexes.py | vbarceloscs/serenata-de-amor | 87e6c8932469478d177372ed7b6311cd66a71efe | MIT
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-08 10:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='document',
name='applicant_id',
field=models.IntegerField(db_index=True, verbose_name='Applicant ID'),
),
migrations.AlterField(
model_name='document',
name='cnpj_cpf',
field=models.CharField(db_index=True, max_length=14, verbose_name='CNPJ or CPF'),
),
migrations.AlterField(
model_name='document',
name='congressperson_id',
field=models.IntegerField(db_index=True, verbose_name='Congressperson ID'),
),
migrations.AlterField(
model_name='document',
name='congressperson_name',
field=models.CharField(max_length=128, verbose_name='Congressperson name'),
),
migrations.AlterField(
model_name='document',
name='document_id',
field=models.IntegerField(db_index=True, verbose_name='Document ID'),
),
migrations.AlterField(
model_name='document',
name='document_number',
field=models.CharField(max_length=128, verbose_name='Document number'),
),
migrations.AlterField(
model_name='document',
name='document_type',
field=models.IntegerField(db_index=True, verbose_name='Document type'),
),
migrations.AlterField(
model_name='document',
name='document_value',
field=models.DecimalField(db_index=True, decimal_places=3, max_digits=10, verbose_name='Document value'),
),
migrations.AlterField(
model_name='document',
name='leg_of_the_trip',
field=models.CharField(max_length=128, verbose_name='Leg of the trip'),
),
migrations.AlterField(
model_name='document',
name='month',
field=models.IntegerField(db_index=True, verbose_name='Month'),
),
migrations.AlterField(
model_name='document',
name='net_value',
field=models.DecimalField(db_index=True, decimal_places=3, max_digits=10, verbose_name='Net value'),
),
migrations.AlterField(
model_name='document',
name='party',
field=models.CharField(db_index=True, max_length=16, verbose_name='Party'),
),
migrations.AlterField(
model_name='document',
name='passenger',
field=models.CharField(max_length=128, verbose_name='Passenger'),
),
migrations.AlterField(
model_name='document',
name='reimbursement_number',
field=models.IntegerField(db_index=True, verbose_name='Reimbursement number'),
),
migrations.AlterField(
model_name='document',
name='reimbursement_value',
field=models.DecimalField(db_index=True, decimal_places=3, max_digits=10, verbose_name='Reimbursement value'),
),
migrations.AlterField(
model_name='document',
name='remark_value',
field=models.DecimalField(db_index=True, decimal_places=3, max_digits=10, verbose_name='Remark value'),
),
migrations.AlterField(
model_name='document',
name='subquota_description',
field=models.CharField(max_length=128, verbose_name='Subquota description'),
),
migrations.AlterField(
model_name='document',
name='subquota_group_description',
field=models.CharField(max_length=128, verbose_name='Subquota group description'),
),
migrations.AlterField(
model_name='document',
name='subquota_group_id',
field=models.IntegerField(db_index=True, verbose_name='Subquota group ID'),
),
migrations.AlterField(
model_name='document',
name='subquota_number',
field=models.IntegerField(db_index=True, verbose_name='Subquota ID'),
),
migrations.AlterField(
model_name='document',
name='term',
field=models.IntegerField(db_index=True, verbose_name='Term'),
),
migrations.AlterField(
model_name='document',
name='year',
field=models.IntegerField(db_index=True, verbose_name='Year'),
),
]
src/tools/__init__.py | Dou-Yu-xuan/pykinship | f81f6667fa08a08fe726736d05476168b2a3e2f0 | MIT
from src.tools.io import *
bayesian_framework/inference/validation_exceptions.py | AlexeyKonakov/state-space-estimation | ba30243211aded0db5b56ece134ab70d0b27ed03 | MIT
class CorruptedStateSpaceModelStructureException(Exception):
pass
class CorruptedStochasticModelStructureException(Exception):
pass
rllab/envs/shadow_env.py | cathywu/rllab-multiagent | 4b5758ff426a49a36e39bb94cee6c9d4caed1981 | MIT
import gym
class ShadowEnv(gym.Env):
def __init__(self, observation_space, action_space):
self._observation_space = observation_space
self._action_space = action_space
@property
def observation_space(self):
return self._observation_space
@property
def action_space(self):
return self._action_space
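# Example usage (a minimal sketch; the spaces below are placeholders):
#
#     from gym import spaces
#     env = ShadowEnv(spaces.Box(low=-1.0, high=1.0, shape=(4,)),
#                     spaces.Discrete(2))
#     print(env.observation_space, env.action_space)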
web_helper/name_generator/japanese_name_generator.py | kittolau/selepy | b1efaa309fb5c43f7b95de17f1d891d5858f36f0 | MIT
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from web_helper.name_generator.abstract_name_generator import AbstractNameGenerator
class JapaneseNameGenerator(AbstractNameGenerator):
#name pool from http://fantasynamegenerators.com/japanese_names.php
namesMale = ["Akahito","Akeno","Aki","Akihiro","Akihisa","Akihito","Akikazu","Akinari","Akinori","Akio","Akio","Akira","Amane","Anzai","Arata","Arinori","Aritomo","Ashihei","Atasuke","Atshushi","Atsumichi","Atsumori","Atsutane","Azumamaro","Baiko","Bairei","Bakin","Basho","Benjiro","Benkei","Bokkai","Botan","Buncho","Bunjiro","Bunrakuken","Bunzo","Bussho","Chikafusa","Chikao","Chiko","Chojiro","Chomei","Chuichi","Dai","Daisetsu","Daisuke","Danjuro","Danno","Dayu","Denbe","Doi","Dokuohtei","Doppo","Ebizo","Eichi","Eichiro","Eien","Eiichi","Eiji","Eijiro","Eikichi","Eisaku","Eisen","Eishi","Eisuke","Eitoku","Eizan","Eizo","Ekiken","Ennosuke","Etsuya","Fujimaro","Fujio","Fukusaburu","Fumiaki","Fumihiko","Fumihiro","Fumimaro","Fumio","Gaho","Gekko","Gempachi","Gengyo","Genichi","Genjo","Gennai","Gennosuke","Genpaku","Gesshin","Gidayu","Gihei","Giichi","Gonkuro","Gonshiro","Goro","Gyokusho","Gyukudo","Hachemon","Hachigoro","Hachiro","Hajime","Hakuseki","Hanshiro","Haranobu","Haru","Haru","Haruhiko","Haruhiro","Haruki","Haruko","Harumi","Harunobu","Hayato","Heihachiro","Heiji","Heikichi","Heizo","Hideaki","Hidehira","Hidekazu","Hideki","Hidemichi","Hideo","Hidetada","Hidetora","Hidetoshi","Hidetsugu","Hideyori","Hideyoshi","Higashikuni","Hikaru","Hikosaburo","Hikozaemon","Hiro","Hiroaki","Hirobumi","Hirofumi","Hiroharu","Hirohisa","Hiroji","Hirokazu","Hirokichi","Hirokumi","Hiroshi","Hiroshige","Hirotada","Hirotaka","Hirotsugu","Hiroya","Hiroyasu","Hiroyuki","Hisahsi","Hisaki","Hisamitsu","Hisanobu","Hisashi","Hisato","Hisayuki","Hitomaro","Hitoshi","Hogai","Hoitsu","Hokichi","Hokusai","Honzo","Horiuchi","Hoshi","Hoshiko","Hyobe","Hyosuke","Hyotaru","Ichibei","Ichiro","Ichisake","Ichiyo","Ichizo","Iemitsu","Iemochi","Ienobu","Iesada","Ieshige","Ietsuna","Ieyasu","Ieyoshi","Ikemoto","Ikki","Ikku","Inejiro","Ippei","Isamu","Isao","Isei","Isoruko","Isoshi","Iwane","Iwao","Izo","Izumo","Jakuchu","Jin","Jinzaburo","Jiro","Jo","Joben","Joji","Jomei","Josuke","Jotaro","Jou","Juichi","Jun","Junichi","Junichiro","Junji","Junnosuke","Junzo","Juro","Jurobei","Juzaburo","Juzo","Kado","Kadonomaro","Kaemon","Kafu","Kagehisa","Kagetoki","Kageyasu","Kaii","Kakuei","Kakuzo","Kamatari","Kamlyn","Kan","Kanbe","Kane","Kaneie","Kanezane","Kanjiro","Kanko","Kannon","Kano","Kansuke","Kantaro","Kanzaburo","Kaori","Kaoru","Kata","Katai","Katsuhiko","Katsuhito","Katsumi","Katsumoto","Katsunan","Katsunosuki","Katsuyoshi","Katsuyuki","Katzumi","Kawanari","Kazu","Kazuhiko","Kazuhiro","Kazuki","Kazuko","Kazuma","Kazunori","Kazuo","Kazushi","Kazushige","Kazutoshi","Kazuyuki","Kei","Keiji","Keiki","Keishi","Keisuke","Keita","Keitaro","Keizo","Ken","Kenichi","Kenji","Kenji","Kenji","Kenjiro","Kenkichi","Kenko","Kensaku","Kenshin","Kentaro","Kenzaburo","Kenzan","Kenzo","Kichibei","Kichisaburo","Kiemon","Kiichi","Kijuro","Kikaku","Kikugoro","Kikunojo","Kimi","Kiminobu","Kimitada","Kin","Kingo","Kinji","Kinmochi","Kinnojo","Kinnosuke","Kinzo","Kioshi","Kisho","Kitahachi","Kiyoemon","Kiyohira","Kiyohisa","Kiyomasu","Kiyomori","Kiyonaga","Kiyonobu","Kiyonori","Kiyoshi","Kiyotaka","Koan","Kobo","Koetsu","Kohei","Koichi","Koin","Koji","Kojiro","Kojuro","Kokan","Kokei","Koki","Kokushi","Konosuke","Konoye","Konyo","Korechika","Korekiyo","Korenaga","Korin","Koryusai","Kosaku","Kosami","Koshiro","Kosho","Kotaro","Koto","Koyo","Kozue","Kuemon","Kuma","Kumanosuke","Kuniaki","Kunihiko","Kunimatsu","Kunimichi","Kunio","Kunisada","Kunitaro","Kuniyoshi","Kuniyuki","Kuri","Kyoden","Kyoichi","Kyoji","Kyoshi","Kyuichi","Kyushichi","Kyuso","Kyuwa","
Mabuchi","Magobei","Magohachi","Makoto","Mamoru","Manabu","Manobu","Manzo","Mareo","Maresuke","Marihito","Maris","Marise","Maro","Masaaki","Masafumi","Masaharu","Masahide","Masahiko","Masahiro","Masakado","Masakazu","Masaki","Masami","Masamichi","Masamune","Masanobu","Masanori","Masao","Masaru","Masashi","Masashige","Masatake","Masato","Masayoshi","Masayuki","Masazumi","Mashai","Mashashi","Mashiro","Masu","Masuhiro","Masujiro","Masutaro","Matabei","Matashichi","Matsu","Matsudaira","Matsuo","Matsusuke","Matsuta","Matsuyo","Meiji","Michihiro","Michinaga","Michinori","Michio","Michizane","Mieko","Miki","Mikio","Minoru","Misao","Mito","Mitsuharu","Mitsuhide","Mitsukuni","Mitsunari","Mitsuo","Mitsuoki","Mitsuzuka","Miyazaki","Miyoko","Mobumasu","Mochihito","Mokichi","Mokuami","Momoru","Montaro","Monzaemon","Morie","Morihiro","Morimasa","Morio","Moromao","Moronobu","Motoichi","Motoki","Motonobu","Motoshige","Mototsune","Motoyasu","Motoyuki","Munemitsu","Munemori","Munenori","Muneyaki","Munoto","Murai","Mushanokoji","Mutsohito","Naganori","Naizen","Nakamaro","Nakazo","Namboku","Nampo","Naoaki","Naofumi","Naohiro","Naoki","Naoko","Naomichi","Naonobu","Naosuke","Naoya","Naozane","Narahiko","Nariaki","Nariakira","Narihari","Narihira","Naruhiko","Natsu","Natsume","Natsuo","Nichiren","Nikki","Nikko","Ninsei","Niou","Nissho","Noboru","Nobuatsu","Nobuharu","Nobuhiko","Nobuhisa","Nobuhito","Nobukazu","Nobuo","Noburo","Nobusuke","Nobuyoki","Nobuyori","Nobuyoshi","Nori","Noriaki","Norihide","Norihisa","Norinaga","Norio","Norishige","Noritada","Noritoshi","Noriyori","Noriyuki","Norogumi","Oda","Ogai","Okakura","Okitsugu","Okura","Okyoito","Omezo","Oniji","Orinosuke","Osamu","Otojiro","Rai","Raidon","Razan","Rei","Reijiro","Reizo","Renjiro","Renzo","Rikiya","Rikyu","Ringo","Rinji","Rintaro","Rkuemon","Robun","Roka","Roku","Rosanjin","Ryo","Ryobe","Ryoichi","Ryoko","Ryoma","Ryosei","Ryozo","Ryu","Ryuichi","Ryunosuke","Ryushi","Ryutaro","Ryuzaburo","Saburo","Sachi","Sachio","Sadaharu","Sadahige","Sadakuno","Sadanobu","Sadao","Sadatake","Sadayoshi","Saemon","Saikaku","Saionji","Sakutaro","Samba","Saneatsu","Sanetomo","Sanjiro","Sanjuro","Sanraku","Sanzo","Satoru","Satoshi","Sawao","Seibei","Seiesnsui","Seihachi","Seiho","Seiichi","Seiji","Seika","Seiki","Seinosuke","Seiryo","Seishiro","Seishisai","Seison","Seitaro","Sekien","Sen","Senichi","Senzo","Sessue","Settan","Sharaku","Shiba","Shichirobei","Shigeaki","Shigekazu","Shigeki","Shigeko","Shigemasa","Shigematsu","Shigemori","Shigenaga","Shigenobu","Shigeru","Shigetaka","Shigetoki","Shigochiyo","Shihei","Shihi","Shijo","Shiki","Shiko","Shimei","Shimpei","Shingen","Shinichi","Shinji","Shinkichi","Shino","Shinobu","ShinriKiyaru","Shinsaku","Shinsui","Shintaro","Shinzaburo","Shinzo","Shirai","Shiro","Shirosama","Shizue","Sho","Shoda","Shogo","Shohei","Shoichi","Shoin","Shoji","Shojiro","Shoko","Shoraku","Shosuke","Shotaro","Shoyo","Shozaburo","Shozo","Shuichi","Shuji","Shukishi","Shuko","Shumei","Shumkichi","Shun","Shun�en","Shuncho","Shungyosai","Shunichi","Shunji","Shunko","Shunmyo","Shunsen","Shunsho","Shunso","Shunsuke","Shusake","Shusaku","Shusui","Shuzo","Soetsu","Sofu","Soh","Soichiro","Sojuro","Sorai","Sosa","Soseki","Soshitsu","Soshu","Sosuke","Sotan","Sotaro","Sotatsu","Sozen","Sozui","Ssekien","Subaru","Suezo","Sugimoto","Sugita","Sukejuro","Sukenobu","Suketsune","Sukeyasu","Sumio","Sumiteru","Sumitomo","Susumu","Suzu","Suzu","Tabito","Tadahisa","Tadakuni","Tadamasa","Tadamichi","Tadao","Tadashi","Tadasu","Tadasuke","Tadataka","Tadayoshi","Tadayuki","Tad
iyuki","Taheiji","Taikan","Taisho","Taisuke","Taji","Takaaki","Takafumi","Takahashi","Takahiro","Takakazu","Takamasa","Takamori","Takamuku","Takanibu","Takanobu","Takanori","Takao","Takashi","Takauji","Takayuki","Takechi","Takehide","Takeichi","Takeji","Takejiro","Takenao","Takeo","Takeru","Takeshi","Takesi","Taki","Takiji","Takuboku","Takuji","Takuma","Takuro","Takuya","Tamasaburo","Tamasine","Tameyoshi","Tamotsu","Tamuramaro","Tanak","Tango","Tanjiro","Tanosuke","Tanyu","Tanzan","Taro","Taro","Taroemon","Tarozaemon","Tashiaki","Tashiro","Tasuku","Tatsui","Tatsukichi","Tatsuya","Tatsuzo","Taysuke","Teiji","Teijo","Teika","Teiljo","Teinosuke","Tekkan","Tenshin","Terao","Teriuihi","Terumoto","Teruo","Tessai","Tetsu","Tetsuhiko","Tetsui","Tetsunori","Tetsuo","Tetsuya","Tetsuyuki","Tetsuzan","Thoki","Tobei","Togai","Tohaku","Toichi","Toin","Toju","Tokaji","Toki","Tokichiro","Tokimasa","Tokimune","Tokugawa","Tokuhei","Tokuma","Tokutomi","Tomeo","Tomiichi","Tomiji","Tomoaki","Tomohiko","Tomokazu","Tomomi","Tomoyuki","Ton","Torajiro","Torazo","Torio","Toru","Toshi","Toshiaki","Toshiharu","Toshikasu","Toshikazu","Toshiki","Toshikuni","Toshimichi","Toshinobu","Toshiro","Toshitsugu","Toshiyuki","Toson","Totoya","Toyoaki","Toyoharu","Toyokazu","Toyokuni","Toyonobu","Toyoshige","Toyotomi","Toyozo","Tsugahara","Tsugiharu","Tsuginori","Tsugumichi","Tsukasa","Tsumemasa","Tsunayoshi","Tsuneari","Tsuneo","Tsunesaburo","Tsuneyo","Tsuramatsu","Tsurayaki","Tsuruki","Tsutomu","Tsuyoshi","Udo","Ukon","Ukyo","Unkei","Utaemon","Utamara","Utamuro","Utemaro","Waotaka","Washi","Washichi","Yachi","Yaichiro","Yajirobei","Yakamochi","Yakumo","Yamato","Yasotaro","Yasuhide","Yasuhiko","Yasuhiro","Yasujiro","Yasukazu","Yasunari","Yasunobu","Yasuo","Yasuoka","Yasushi","Yasutake","Yasutoki","Yasuyuki","Yataro","Yatsuhiro","Yodo","Yohachi","Yoichi","Yoichibei","Yoriie","Yorikane","Yoringa","Yoritoki","Yoritomo","Yoriyoshi","Yoriyuki","Yosai","Yoshi","Yoshiaga","Yoshiaki","Yoshida","Yoshifumi","Yoshifusa","Yoshihide","Yoshihiro","Yoshihisa","Yoshihito","Yoshii","Yoshiiku","Yoshikazu","Yoshiki","Yoshimasa","Yoshimatsu","Yoshimi","Yoshimitsu","Yoshimochi","Yoshimune","Yoshinaka","Yoshino","Yoshinobu","Yoshinori","Yoshio","Yoshisada","Yoshitaka","Yoshitake","Yoshiteru","Yoshitoki","Yoshitomo","Yoshitora","Yoshitoshi","Yoshitsune","Yoshiyuki","Yoson","Yosuke","Yozo","Yugoro","Yuichi","Yuifum","Yuji","Yujiro","Yuki","Yukichi","Yukinaga","Yukio","Yuko","Yunosuke","Yushiro","Yusuke","Yutaka","Zenko","Zeshin"];
namesFemale = ['Abi','Ado','Aeko','Aemi','Agasa','Ai','Aiga','Aiko','Aira','Aisa','Aishun','Akae','Akako','Akane','Akasuki','Akemi','Akeno','Aki','Akiko','Akina','Akio','Akira','Akirako','Akko','Ako','Akoto','Akuro','Amarante','Amari','Amaya','Ami','Anbi','Anda','Ane','Aneka','Aneko','Anii','Anju','Ano','Anri','Ao','Aoba','Aoi','Aomi','Aoki','Are','Arei','Ari','Arisa','Arisu','Asa','Asahi','Asaji','Asako','Asami','Asuka','Atsuko','Au','Aya','Ayaka','Ayako','Ayame','Ayano','Ayao','Ayase','Aye','Ayuka','Ayuko','Ayumi','Ayuri','Azami','Azumi','Azusa','Bachiko','Beni','Benten','Chiaki','Chie','Chieko','Chifumi','Chifuyu','Chigusa','Chiharu','Chihiro','Chiho','Chihoko','Chihomi','Chihori','Chika','Chikage','Chikako','Chikaze','Chiko','Chikuma','Chima','Chimaki','Chimako','Chimari','China','Chinami','Chinatsu','Chino','Chio','Chisa','Chisato','Chise','Chisuzu','Chitako','Chitose','Chiyako','Chiyeko','Chiyo','Chiyu','Chiyuki','Chiyumi','Chiyuri','Chizu','Chizuru','Cho','Dai','Den','Dori','Eari','Eba','Echiko','Egao','Eho','Ei','Eiko','Eimu','Eire','Eitsu','Eka','Ema','Emao','Eme','Emi','Emiho','Emika','Emiko','Emina','Emio','Emiri','Emiya','Emu','En','Ena','Enko','Enri','Ere','Eren','Erena','Eri','Erika','Eriko','Erisa','Erisu','Erize','Eru','Esumi','Etsu','Etsuko','Euiko','Fuji','Fujiko','Fukami','Fuki','Fukuko','Fumi','Fumie','Fumika','Fumiki','Fumiko','Fumiya','Fusa','Fusae','Futaba','Fuuko','Fuyu','Fuyuko','Fuyume','Gemmei','Gen','Gin','Gina','Ginko','Hagino','Hainako','Hairi','Hako','Hama','Hami','Han','Hana','Hanae','Hanako','Haniko','Haru','Harui','Haruka','Harukichi','Haruko','Harumi','Hasumi','Hatomi','Hatsu','Hatsue','Hatsuka','Hatsumi','Hatsune','Hatsuyo','Haya','Hazuki','Hibari','Hide','Hideko','Hikari','Hikaru','Himeka','Himeko','Hina','Hinako','Hirari','Hiro','Hiroe','Hiroko','Hiromi','Hiroshi','Hisa','Hisae','Hisako','Hisano','Hitomi','Hitomo','Hitoshi','Hizuru','Homi','Homugi','Hona','Honami','Honoka','Honomi','Hoshi','Hoshie','Hoshiko','Hoshiyo','Hosuzu','Hotaru','Hozumi','Iako','Ibu','Ichi','Ichie','Ichiha','Ichiho','Ichiko','Ichino','Ie','Iena','Iho','Ihono','Iki','Ikoi','Iku','Ikue','Ikuko','Ikumu','Ima','Imari','Ina','Inaho','Inari','Ine','Ino','Inoue','Io','Ioko','Iona','Iori','Irisa','Isa','Isaki','Isako','Isami','Isamu','Ise','Ishi','Isoko','Isono','Isuzu','Ito','Itsuko','Iwa','Iyo','Iyona','Izumi','Jin','Jori','Joruri','Jun','June','Junko','Juri','Juria','Kadiri','Kae','Kaede','Kagami','Kaho','Kahori','Kahoru','Kai','Kaida','Kaiya','Kaiyo','Kameko','Kako','Kama','Kame','Kami','Kamie','Kaminari','Kamlyn','Kana','Kane','Kaneko','Kaneru','Kanna','Kanon','Kao','Kaori','Kaoru','Karen','Karin','Kasumi','Kata','Katsu','Katsue','Katsuko','Katsumi','Kawa','Kaya','Kayami','Kayo','Kayoko','Kayu','Kazashi','Kazu','Kazue','Kazuko','Kazumi','Kei','Keiko','Kichi','Kiho','Kii','Kiko','Kiku','Kikyo','Kimi','Kimie','Kimiko','Kin','Kina','Kino','Kinu','Kinuko','Kinuye','Kinuyo','Kioko','Kioshi','Kirari','Kiri','Kisa','Kishi','Kit','Kita','Kiwa','Kiyo','Kiyoko','Kiyomi','Kiyoshi','Kiyumi','Ko','Kochiyo','Kofuyu','Kohana','Koharu','Koi','Koiso','Koken','Koko','Koma','Komachi','Kome','Komi','Konami','Konatsu','Koneko','Konomi','Koto','Kotone','Kouko','Koume','Kozakura','Kozue','Kukiko','Kuma','Kumi','Kumiko','Kumiyo','Kuni','Kunie','Kuniko','Kura','Kurea','Kuri','Kurumi','Kuwa','Kyoko','Kyouka','Kyouko','Kyoumi','Leiko','Machi','Machiko','Madoka','Mae','Maeko','Maemi','Mai','Maiko','Maiya','Maki','Makiko','Mako','Mami','Mamiko','Man','Mana','Manami','Mari','Mariko','Marise','Maru','Masa','Masae
','Masago','Masako','Masu','Masumi','Matsu','Matsuko','Maya','Mayako','Mayo','Mayoko','Mayuko','Mayumi','Megu','Megumi','Michi','Michie','Michiko','Michiru','Midori','Mie','Mieko','Miho','Mihoko','Miiko','Mika','Mikazuki','Miki','Mikka','Miliko','Mina','Minako','Mine','Mineko','Mino','Mio','Misa','Misaki','Misako','Misao','MisatoMitsu','Misoka','Mitsu','Mitsuko','Mitsuyo','Miu','Miwa','Miwako','Miya','Miyako','Miyo','Miyoko','Miyoshi','Miyu','Miyuki','Mizuki','Mizuko','Moanna','Moe','Momoko','Mon','Mori','Morie','Moto','Muika','Mura','Murasaki','Mutsuko','Mutsumi','Nadeshiko','Nagisa','Naho','Nahoko','Nami','Namie','Namika','Namiko','Namiyo','Nana','Nanako','Nanami','Nanao','Nanase','Nao','Naoko','Naomi','Naora','Nara','Nari','Nariko','Narumi','Natsu','Natsuko','Natsumi','Nayoko','Nene','Nikki','Nishi','Nomi','Nori','Norie','Noriko','Nozomi','Nui','Nyoko','Ochiyo','Ogi','Oharu','Oki','Okichi','Okiku','Okimi','Ome','Omitsu','Ori','Oriana','Orika','Orime','Orimi','Orino','Osami','Osen','Otoe','Otome','Otsu','Otsune','Poemu','Rai','Raicho','Raira','Raku','Ran','Rea','Rebun','Rei','Reichiru','Reiko','Reina','Reira','Reisa','Remi','Remon','Ren','Rena','Rie','Rieko','Riho','Rii','Rika','Rikako','Riko','Riku','Rin','Rina','Rinako','Ringo','Rini','Rino','Rio','Rira','Ririko','Risa','Risako','Rise','Risu','Ritsuko','Roku','Rokuko','Romi','Rui','Ruka','Ruma','Rumi','Rumiko','Runa','Ruri','Ruriko','Ruru','Ryo','Ryoko','Ryou','Ryu','Sachi','Sachiko','Sada','Sadako','Sae','Saeko','Sai','Saika','Saito','Sakae','Sakamae','Saki','Sakiko','Sako','Sakue','Sakuko','Sakura','Sakurako','Sakuro','Sama','Sanako','Saori','Sata','Sato','Satoko','Satomi','Satsu','Satsuki','Satu','Sawa','Sawako','Saya','Sayo','Sayoko','Sayomi','Sayuri','Sei','Seiko','Seka','Seki','Sen','Setsu','Setsuko','Shige','Shiho','Shihobu','Shika','Shina','Shino','Shinobu','Shioko','Shiori','Shirushi','Shizu','Shizue','Shizuka','Shoken','Shoko','Shungiku','Sugi','Sui','Sukey','Suki','Suko','Sumi','Sumiko','Sute','Suzu','Suzue','Suzuki','Suzuko','Suzume','Tadako','Tae','Tai','Taira','Taji','Taka','Takako','Takara','Take','Taki','Tama','Tamae','Tamafune','Tamaki','Tamami','Tame','Tami','Tamika','Tamiko','Tamiyo','Tanak','Tanaka','Tane','Tani','Taniko','Tansho','Tara','Taree','Taru','Tatsu','Tatsumi','Taura','Taya','Tazu','Teruyo','Tetsu','Toki','Tokie','Tokiko','Tokiwa','Tokiyo','Toku','Tomi','Tomiju','Tomiko','Tomo','Tomoe','Tomoko','Tomomi','Tooka','Tora','Tori','Toshi','Toshie','Toshiko','Toya','Toyoko','Tsugi','Tsuki','Tsukiko','Tsukiyama','Tsuna','Tsuru','Tsuya','Ui','Uka','Uki','Ume','Umeka','Umeki','Umeko','Umi','Una','Uno','Urako','Urano','Urara','Urena','Urumi','Usagi','Usami','Uta','Utako','Utsuwa','Utzuki','Uzuki','Waka','Wakaba','Wakako','Wakana','Wakano','Wakayo','Waki','Wakiko','Wako','Wakumi','Wakuri','Wami','Warabi','Wattan','Wayoko','Wazuka','Yachi','Yae','Yaeko','Yama','Yasu','Yasuko','Yasumu','Yatsumi','Yawara','Yayoi','Yei','Yo','Yodo','Yoi','Yoka','Yokkako','Yoko','Yone','Yori','Yoriko','Yoshe','Yoshi','Yoshike','Yoshiko','Yoshino','You','Yu','Yufu','Yui','Yuka','Yukako','Yukari','Yuki','Yukiji','Yukika','Yukiko','Yukiyo','Yuko','Yuma','Yumako','Yume','Yumeji','Yumi','Yumia','Yumiko','Yumisa','Yuna','Yuno','Yura','Yuri','Yuria','Yuriko','Yurisa','Yuro','Yusa','Yusuke','Yutori','Yutsuko','Yuuko','Yuyu'];
namesFamily = ["Abe","Adachi","Akagi","Akamine","Aki","Akiyama","Amano","Amari","Amaya","Ando","Anno","Anzai","Aoki","Aoyama","Arai","Arakaki","Arakawa","Araki","Arata","Araya","Arima","Arita","Asa","Asai","Asano","Asato","Ashikaga","Azuma","Baba","Ban","Bando","Chiba","Chinen","Chino","Date","Doi","Domen","Eguchi","Endo","Enomoto","Eto","Fujii","Fujikawa","Fujimori","Fujimoto","Fujimura","Fujino","Fujioka","Fujita","Fujiwara","Fukuda","Fukuhara","Fukui","Fukumoto","Fukunaga","Fukushima","Funai","Furukawa","Furuta","Furutani","Furuya","Fuse","Gima","Go","Goda","Goto","Goya","Hada","Haga","Hagiwara","Hamada","Hamamoto","Hamasaki","Handa","Hano","Hara","Harada","Hase","Hasegawa","Hashimoto","Hata","Hatanaka","Hattori","Hayakawa","Hayashi","Hayashida","Higa","Higashi","Higuchi","Hino","Hirabayashi","Hirai","Hirano","Hiraoka","Hirata","Hirayama","Hironaka","Hirose","Hirota","Hoga","Hokama","Honda","Hora","Hori","Horie","Horiuchi","Hoshino","Ichikawa","Ida","Ide","Igarashi","Ige","Iha","Iida","Ike","Ikeda","Ikehara","Imada","Imai","Imamura","Inaba","Inouye","Isa","Iseri","Ishibashi","Ishida","Ishihara","Ishii","Ishikawa","Ishimoto","Isobe","Ito","Itoh","Iwai","Iwamoto","Iwasaki","Iwata","Izumi","Jin","Jo","Juba","Kaba","Kagawa","Kai","Kajiwara","Kamei","Kamiya","Kanai","Kanda","Kaneko","Kanemoto","Kaneshiro","Kanno","Kano","Kasai","Kase","Kataoka","Katayama","Kato","Kawabata","Kawaguchi","Kawahara","Kawai","Kawakami","Kawamoto","Kawamura","Kawano","Kawasaki","Kawashima","Kawata","Kaya","Kibe","Kida","Kido","Kikuchi","Kimoto","Kimura","Kinoshita","Kishi","Kishimoto","Kita","Kitagawa","Kitamura","Kiyabu","Kobashigawa","Kobayashi","Kobe","Koda","Kodama","Koga","Koike","Koizumi","Kojima","Komatsu","Kon","Konda","Kondo","Konishi","Konno","Kono","Konya","Koyama","Koyanagi","Kuba","Kubo","Kubota","Kudo","Kumagai","Kuno","Kuramoto","Kurata","Kure","Kurihara","Kuroda","Kurokawa","Kuse","Kusumoto","Kuwahara","Machi","Machida","Mae","Maeda","Maekawa","Maita","Maki","Makino","Mano","Maruyama","Masaki","Mase","Masuda","Matsubara","Matsuda","Matsui","Matsumoto","Matsumura","Matsunaga","Matsuno","Matsuo","Matsuoka","Matsushima","Matsushita","Matsuura","Matsuyama","Matsuzaki","Mayeda","Mihara","Mikami","Miki","Minami","Minamoto","Mino","Mita","Miura","Miya","Miyagawa","Miyahara","Miyahira","Miyake","Miyamoto","Miyasaki","Miyasato","Miyashiro","Miyashita","Miyata","Miyazaki","Miyoshi","Mizuno","Mochizuki","Mori","Morikawa","Morimoto","Morine","Morino","Morioka","Morishige","Morishita","Morita","Moriyama","Mukai","Mura","Murai","Murakami","Muramoto","Muranaka","Murano","Muraoka","Murata","Murayama","Muto","Nagai","Nagamine","Nagano","Nagao","Nagasawa","Nagata","Naito","Nakada","Nakagawa","Nakahara","Nakai","Nakajima","Nakama","Nakamoto","Nakamura","Nakanishi","Nakano","Nakao","Nakashima","Nakasone","Nakata","Nakatani","Nakatomi","Nakayama","Nakazawa","Namba","Nii","Nishi","Nishida","Nishihara","Nishikawa","Nishimoto","Nishimura","Nishioka","Nishiyama","Nitta","Niwa","No","Noda","Noguchi","Nomura","Nonaka","Noya","Oba","Obara","Obi","Oda","Oe","Ogasawara","Ogata","Ogawa","Ogino","Ogura","Oh","Ohara","Ohashi","Ohta","Oishi","Oka","Okabe","Okada","Okamoto","Okamura","Okane","Okano","Okawa","Okazaki","Oki","Okimoto","Okino","Okita","Okubo","Okuda","Okuma","Okumura","Okura","Omori","Omura","Onaga","Onishi","Ono","Orio","Osada","Osaki","Ose","Oshima","Oshiro","Oshita","Ota","Otake","Otani","Otsuka","Ouchi","Oyama","Oye","Ozaki","Ozawa","Sada","Sadow","Saeki","Saiki","Saito","Sakaguchi","Sakai","Sakamoto","Sakata","Sako
","Sakuma","Sakurai","Sama","Sanda","Sando","Sano","Sasaki","Sato","Satow","Sawa","Sawada","Sawaya","Sazama","Seki","Sekiguchi","Seno","Seo","Sera","Seta","Seto","Shiba","Shibata","Shibuya","Shima","Shimabukuro","Shimada","Shimamoto","Shimizu","Shimoda","Shimomura","Shinohara","Shinsato","Shintani","Shirai","Shiraishi","Shiraki","Shiro","Shiroma","Shishido","Shoda","Shoji","Soda","Soga","Soma","Sone","Sonoda","Suda","Sugai","Sugawara","Sugihara","Sugimoto","Sugita","Sugiyama","Suko","Sumida","Sunada","Suto","Suzuki","Tabata","Tachibana","Tada","Tagawa","Taguchi","Tahara","Taira","Tajima","Takagi","Takahashi","Takai","Takaki","Takamoto","Takano","Takara","Takashima","Takata","Takayama","Takeda","Takei","Takemoto","Takenaka","Takeshita","Taketa","Takeuchi","Tamaki","Tamanaha","Tamashiro","Tamura","Tanabe","Tanaka","Tani","Tanigawa","Taniguchi","Tanimoto","Tanji","Tano","Tao","Tashiro","Tengan","Terada","Teramoto","Teruya","Teshima","Tobe","Toda","Tokuda","Tokunaga","Toma","Tominaga","Tomita","Tone","Toyama","Toyoda","Tsuchida","Tsuchiya","Tsuda","Tsuji","Tsukamoto","Tsutsui","Tsutsumi","Uchida","Uchiyama","Ueda","Uehara","Uemura","Ueno","Umeda","Umemoto","Uno","Usui","Uyeda","Uyehara","Uyemura","Uyeno","Wada","Wakabayashi","Watanabe","Yagi","Yamada","Yamagata","Yamaguchi","Yamakawa","Yamamoto","Yamamura","Yamanaka","Yamane","Yamaoka","Yamasaki","Yamashiro","Yamashita","Yamauchi","Yamazaki","Yanagi","Yano","Yasuda","Yasui","Yasutake","Yogi","Yokota","Yokoyama","Yonamine","Yoneda","Yoshida","Yoshihara","Yoshikawa","Yoshimoto","Yoshimura","Yoshinaga","Yoshino","Yoshioka"];
def __init__(self):
super(JapaneseNameGenerator, self).__init__()
def getMaleName(self):
return self.nameGen(self.namesMale,self.namesFamily)
def getFemaleName(self):
return self.nameGen(self.namesFemale,self.namesFamily)
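# A minimal usage sketch (not from the original file): it assumes the inherited
# nameGen(given, family) helper pairs a random given name with a random family
# name and returns the combined string; the printed values are illustrative only.
gen = JapaneseNameGenerator()
print(gen.getMaleName())    # e.g. "Akira Tanaka"
print(gen.getFemaleName())  # e.g. "Hana Kobayashi"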
| 1,056.52381
| 9,626
| 0.666967
| 2,471
| 22,187
| 5.983812
| 0.930393
| 0.001758
| 0.001894
| 0.002841
| 0.003382
| 0
| 0
| 0
| 0
| 0
| 0.000045
| 0.000045
| 0.004327
| 22,187
| 20
| 9,627
| 1,109.35
| 0.669232
| 0.004868
| 0
| 0
| 0
| 0
| 0.651296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.090909
| 0.181818
| 0.909091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
715622a86c880fcf033d3d26a1ee07d4505d212c
| 160
|
py
|
Python
|
boto1/admin.py
|
raffienficiaud/django_mturk_minimalistic
|
89468b0c3fa87ce3ea7cd97e5cfd4427b21f39a0
|
[
"MIT"
] | 10
|
2015-06-18T04:13:42.000Z
|
2020-07-10T23:16:22.000Z
|
boto1/admin.py
|
raffienficiaud/django_mturk_minimalistic
|
89468b0c3fa87ce3ea7cd97e5cfd4427b21f39a0
|
[
"MIT"
] | null | null | null |
boto1/admin.py
|
raffienficiaud/django_mturk_minimalistic
|
89468b0c3fa87ce3ea7cd97e5cfd4427b21f39a0
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from boto1.models import Image, Hit, Result
admin.site.register(Image)
admin.site.register(Hit)
admin.site.register(Result)
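# A hedged sketch (not in the original file): a ModelAdmin subclass, used
# *instead of* the plain admin.site.register(Image) call above, adds list
# columns and search to the admin. The field names are hypothetical, since the
# Image model's fields are not shown here.
@admin.register(Image)
class ImageAdmin(admin.ModelAdmin):
    list_display = ("id",)      # replace with actual Image fields
    search_fields = ("id",)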
| 17.777778
| 43
| 0.8
| 24
| 160
| 5.333333
| 0.5
| 0.210938
| 0.398438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006944
| 0.1
| 160
| 8
| 44
| 20
| 0.881944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
715ed59b2712b4e0924caa857e826874b94f2891
| 2,609
|
py
|
Python
|
examples/draw_a_cat.py
|
jontonsoup4/ascii_art
|
f36e0724c8e1c94631c673e2d9803a1861ea5bed
|
[
"MIT"
] | 199
|
2015-11-12T15:56:51.000Z
|
2021-09-28T04:34:50.000Z
|
examples/draw_a_cat.py
|
data-science-z/ascii_art
|
28787cbc0459529259f0906f6e732f66616c2709
|
[
"MIT"
] | 2
|
2015-11-13T13:22:22.000Z
|
2016-12-19T15:15:30.000Z
|
examples/draw_a_cat.py
|
jontonsoup4/ascii_art
|
f36e0724c8e1c94631c673e2d9803a1861ea5bed
|
[
"MIT"
] | 31
|
2015-11-13T03:59:05.000Z
|
2022-03-19T02:07:23.000Z
|
from ascii_art.ascii_art import ASCIIArt, ASCIIPicture
# ASCII drawing
picture = ASCIIArt('cat', 2).draw_ascii(curve=1)
ASCIIPicture(picture).save('cat_scale2_draw_ascii.png')
with open('cat_scale2_draw.txt', 'w') as f:
f.write(''.join(picture))
picture = ASCIIArt('cat', 5).draw_ascii(curve=1)
ASCIIPicture(picture).save('cat_scale5_draw_ascii.png')
with open('cat_scale5_draw.txt', 'w') as f:
f.write(''.join(picture))
# Colored ASCII drawing using sorted custom character sets on a black background
colored_picture = ASCIIArt('cat', 2).draw_color_ascii(ASCIIArt.sort('09215'))
ASCIIPicture(colored_picture, 'black').save('cat_scale2_color_numbers')
colored_picture = ASCIIArt('cat', 5).draw_color_ascii(ASCIIArt.sort('09215'))
ASCIIPicture(colored_picture, 'black').save('cat_scale5_color_numbers')
colored_picture = ASCIIArt('cat', 2).draw_color_ascii(ASCIIArt.sort('jontonsoup4'))
ASCIIPicture(colored_picture, 'black').save('cat_scale2_color_name')
colored_picture = ASCIIArt('cat', 5).draw_color_ascii(ASCIIArt.sort('jontonsoup4'))
ASCIIPicture(colored_picture, 'black').save('cat_scale5_color_name')
# ASCII to HTML using 'kitten' as a character set on a black background
html = ASCIIArt('cat', 1).draw_html(ASCIIArt.sort('kitten'), background_color='black')
with open('cat_scale1_html_kitten.html', 'w') as f:
f.write(''.join(html))
html = ASCIIArt('cat', 2).draw_html(ASCIIArt.sort('kitten'), background_color='black')
with open('cat_scale2_html_kitten.html', 'w') as f:
f.write(''.join(html))
# ASCII to HTML using only '#' on a black background
html = ASCIIArt('cat', 1).draw_html(ASCIIArt.BLOCK, background_color='black')
with open('cat_scale1_html_block.html', 'w') as f:
f.write(''.join(html))
html = ASCIIArt('cat', 2).draw_html(ASCIIArt.BLOCK, background_color='black')
with open('cat_scale2_html_block.html', 'w') as f:
f.write(''.join(html))
# Colored ASCII with only '#' on a black background
colored_picture = ASCIIArt('cat', 2).draw_color_ascii(ASCIIArt.BLOCK, curve=1.5)
ASCIIPicture(colored_picture, 'black').save('cat_scale2_block_color.png')
colored_picture = ASCIIArt('cat', 5).draw_color_ascii(ASCIIArt.BLOCK, curve=1.5)
ASCIIPicture(colored_picture, 'black').save('cat_scale5_block_color.png')
# Colored ASCII with full grayscale
colored_picture = ASCIIArt('cat', 2).draw_color_ascii(ASCIIArt.FULL_RANGE, curve=1.5)
ASCIIPicture(colored_picture).save('cat_scale2_full_range_color.png')
colored_picture = ASCIIArt('cat', 5).draw_color_ascii(ASCIIArt.FULL_RANGE, curve=1.5)
ASCIIPicture(colored_picture).save('cat_scale5_full_range_color.png')
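# A small additional sketch (not in the original examples): previewing the
# grayscale rendering in the terminal instead of saving it, using only calls
# already shown above.
preview = ASCIIArt('cat', 1).draw_ascii(curve=1)
print(''.join(preview))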
| 44.982759
| 86
| 0.762361
| 391
| 2,609
| 4.846547
| 0.132992
| 0.118206
| 0.094987
| 0.105541
| 0.845383
| 0.821108
| 0.789446
| 0.783113
| 0.703958
| 0.660686
| 0
| 0.02173
| 0.08279
| 2,609
| 57
| 87
| 45.77193
| 0.770163
| 0.11307
| 0
| 0.162162
| 0
| 0
| 0.234273
| 0.156182
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.027027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
71639e106271c5b25d31b7fe8149b88294d49496
| 1,517
|
py
|
Python
|
lztools/text/__init__.py
|
Zanzes/lztools
|
4091416464cbb441f5af26ade6a03ff18ae1bf01
|
[
"MIT"
] | null | null | null |
lztools/text/__init__.py
|
Zanzes/lztools
|
4091416464cbb441f5af26ade6a03ff18ae1bf01
|
[
"MIT"
] | null | null | null |
lztools/text/__init__.py
|
Zanzes/lztools
|
4091416464cbb441f5af26ade6a03ff18ae1bf01
|
[
"MIT"
] | null | null | null |
from lztools.pytools.utils import import_class
from .lztext import words, create_line, pad, pad_length, wall_text, box_text, regex, wrap_lines, insert_spaces, search_words, get_random_word, find_matching, as_literal, print_collection, print_dict
from .lztext import is_collection, print_dir_values, parse_name_list, line, format_seconds, center_on, trim_end, format_api_error, format_mission_errors, format_arg_string, format_test_name
from .lztext import generate_uniqe_date_based_name, generate_uniqe_date_based_name_numeric, generate_text
from .match_pairs import brace_matcher, bracket_matcher, parentheses_matcher, greater_and_less_than_matcher, quote_matcher, quotes_matcher
BlockWriter = import_class()
ColumnWriter = import_class()
# __all__ entries must be strings so that star imports work.
__all__ = [
"ColumnWriter",
"BlockWriter",
# lztext
"words",
"create_line",
"pad",
"pad_length",
"wall_text",
"box_text",
"regex",
"wrap_lines",
"insert_spaces",
"search_words",
"get_random_word",
"find_matching",
"as_literal",
"print_collection",
"print_dict",
"is_collection",
"print_dir_values",
"parse_name_list",
"line",
"format_seconds",
"center_on",
"trim_end",
"format_api_error",
"format_mission_errors",
"format_arg_string",
"format_test_name",
"generate_uniqe_date_based_name",
"generate_uniqe_date_based_name_numeric",
"generate_text",
# match_pairs
"brace_matcher",
"bracket_matcher",
"parentheses_matcher",
"greater_and_less_than_matcher",
"quote_matcher",
"quotes_matcher",
]
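# A hedged usage sketch (not in the original file): importing a couple of the
# exported helpers from outside the package. The call signatures below are
# assumptions based on the helper names and are not verified here.
# from lztools.text import get_random_word, box_text
# word = get_random_word()      # assumed to take no arguments
# print(box_text(word))         # assumed to accept a single string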
| 28.092593
| 198
| 0.771259
| 198
| 1,517
| 5.378788
| 0.353535
| 0.056338
| 0.06385
| 0.082629
| 0.816901
| 0.816901
| 0.816901
| 0.816901
| 0.816901
| 0.816901
| 0
| 0
| 0.173368
| 1,517
| 53
| 199
| 28.622642
| 0.849282
| 0.011866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.152174
| 0
| 0.152174
| 0.108696
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7179dccb185c49aed9df0a7ae83af225c0359b0c
| 2,962
|
py
|
Python
|
fasta2csv/converter.py
|
drewk2021/fastatocsv
|
267ea7939ba2d773d9775c331217f350512d4b55
|
[
"MIT"
] | 1
|
2020-05-12T22:18:17.000Z
|
2020-05-12T22:18:17.000Z
|
build/lib/fasta2csv/converter.py
|
drewk2021/fastatocsv
|
267ea7939ba2d773d9775c331217f350512d4b55
|
[
"MIT"
] | null | null | null |
build/lib/fasta2csv/converter.py
|
drewk2021/fastatocsv
|
267ea7939ba2d773d9775c331217f350512d4b55
|
[
"MIT"
] | 1
|
2020-05-12T22:19:13.000Z
|
2020-05-12T22:19:13.000Z
|
import sys, os
import errno  # needed for the IOError raised when an input path does not exist
def convert(input,output):
"""
Purpose: To convert a .fasta file of >= 1 sequence(s) into a .csv file, with
two columns, one containing the headline identifier, the other containing the
sequence.
Parameters: the input .fasta file path, a string, and the desired .csv output
path, another string.
Return: the output path, a string.
"""
if not os.path.exists(input):
raise IOError(errno.ENOENT, 'No such file', input)
# Read in Fasta
fasta = open(input, 'r')
fasta_lines = fasta.readlines()
seq = {}
seqs = []
for line in fasta_lines:
if line[0] == ">": # head line with description
seqs += [seq] # adding dictionary to broader list
seq_local = {}
seq_head = line.strip(">\n")
seq_local["seq_type"] = seq_head # identifier
seq_local["seq"] = "" # actual sequence
seq = seq_local
else: # sequence line
seq["seq"] += line.strip("\n")
fasta.close()
# Convert fasta to csv
seqs.pop(0) # removing first (empty) item in seqs list i.e. fencepost
csv_lines = ["Properties, Sequence\n"]
for seq in seqs:
csv_line = seq["seq_type"] + "," + seq["seq"] + "\n"
csv_lines.append(csv_line)  # append the full line rather than its individual characters
# Output csv file
csv = open(output, 'w')
csv.writelines(csv_lines)
csv.close()
return output
def convertWithAttributes(input,output):
"""
Purpose: To convert a .fasta file of >= 1 sequence(s) into a .csv file, with
n >= 2 columns, n-1 of which contain attributes of the sequence listed in the
headline identifier, and 1 of which contains the actual sequence.
Parameters: the input .fasta file path, a string, and the desired .csv output
path, another string.
Return: the output path, a string.
"""
if not os.path.exists(input):
raise IOError(errno.ENOENT, 'No such file', input)
# Read in Fasta
fasta = open(input, 'r')
fasta_lines = fasta.readlines()
seq = {}
seqs = []
for line in fasta_lines:
if line[0] == ">": # head line with description
seqs += [seq] # adding dictionary to broader list
seq_local = {}
seq_head = line.strip(">\n").split("|") # separating the head's attributes
seq_local["seq_type_list"] = seq_head # identifier
seq_local["seq"] = "" # actual sequence
seq = seq_local
else: # sequence line
seq["seq"] += line.strip("\n")
fasta.close()
# Convert fasta to csv
seqs.pop(0) # removing first (empty) item in seqs list i.e. fencepost
csv_lines = []
for seq in seqs:
csv_line = ""
for type in seq["seq_type_list"]:
csv_line += (type + ",")
csv_lines.append(csv_line + "\n")  # append the full line rather than its individual characters
# Output csv file
csv = open(output, 'w')
csv.writelines(csv_lines)
csv.close()
return output
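# A short usage sketch (not in the original file); the file names below are
# placeholders.
if __name__ == "__main__":
    convert("sequences.fasta", "sequences.csv")
    convertWithAttributes("sequences.fasta", "sequences_attrs.csv")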
| 27.682243
| 86
| 0.587441
| 393
| 2,962
| 4.343511
| 0.221374
| 0.037493
| 0.038664
| 0.023433
| 0.77563
| 0.77563
| 0.753368
| 0.753368
| 0.753368
| 0.753368
| 0
| 0.00431
| 0.295071
| 2,962
| 106
| 87
| 27.943396
| 0.813218
| 0.374409
| 0
| 0.736842
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.017544
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
71cc60337fc5844e7ddcaf92186ccf4b22e0e8b0
| 12,385
|
py
|
Python
|
tests/seleniumwire/proxy/test_handler.py
|
mar664/selenium-wire
|
8f38554c698343df99731ef934b157922b9c9afb
|
[
"MIT"
] | null | null | null |
tests/seleniumwire/proxy/test_handler.py
|
mar664/selenium-wire
|
8f38554c698343df99731ef934b157922b9c9afb
|
[
"MIT"
] | null | null | null |
tests/seleniumwire/proxy/test_handler.py
|
mar664/selenium-wire
|
8f38554c698343df99731ef934b157922b9c9afb
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from unittest.mock import call, Mock
from seleniumwire.proxy.handler import CaptureRequestHandler
from seleniumwire.proxy.proxy2 import SkipRequest
class CaptureRequestHandlerTest(TestCase):
def test_request_modifier_called(self):
self.handler.request_handler(self.handler, self.body)
self.mock_modifier.modify.assert_called_once_with(self.handler)
def test_save_request_called(self):
self.handler.request_handler(self.handler, self.body)
self.mock_storage.save_request.assert_called_once_with(self.handler, self.body)
def test_ignores_options_method_by_default(self):
self.handler.command = 'OPTIONS'
self.handler.request_handler(self.handler, self.body)
self.assertFalse(self.mock_modifier.modify.called)
self.assertFalse(self.mock_storage.save_request.called)
def test_ignores_get_method(self):
self.handler.server.options = {'ignore_http_methods': ['OPTIONS', 'GET']}
self.handler.request_handler(self.handler, self.body)
self.assertFalse(self.mock_modifier.modify.called)
self.assertFalse(self.mock_storage.save_request.called)
def test_ignores_no_method(self):
self.handler.command = 'OPTIONS'
self.handler.server.options = {'ignore_http_methods': []}
self.handler.request_handler(self.handler, self.body)
self.mock_storage.save_request.assert_called_once_with(self.handler, self.body)
def test_skip_url(self):
self.handler.server.skip_rules = [
r'.*google.com.*'
]
with self.assertRaises(SkipRequest):
self.handler.request_handler(self.handler, self.body)
def test_does_allow_url(self):
self.handler.server.allow_rules = [
r'.*google.com.*'
]
self.handler.request_handler(self.handler, self.body)
def test_doesnt_allow_url(self):
self.handler.server.allow_rules = [
r'.*prod2.server.com.*'
]
with self.assertRaises(SkipRequest):
self.handler.request_handler(self.handler, self.body)
def test_save_response_called(self):
res, res_body = Mock(), Mock()
self.handler.response_handler(self.handler, self.body, res, res_body)
self.mock_storage.save_response.assert_called_once_with('12345', res, res_body)
def test_ignores_response(self):
res, res_body = Mock(), Mock()
delattr(self.handler, 'id')
self.handler.response_handler(self.handler, self.body, res, res_body)
self.assertFalse(self.mock_storage.save_response.called)
def setUp(self):
CaptureRequestHandler.__init__ = Mock(return_value=None)
self.mock_modifier, self.mock_storage = Mock(), Mock()
self.handler = CaptureRequestHandler()
self.handler.server = Mock()
self.handler.id = '12345'
self.handler.server.modifier = self.mock_modifier
self.handler.server.storage = self.mock_storage
self.handler.server.options = {}
self.handler.server.scopes = []
self.handler.server.allow_rules = []
self.handler.server.skip_rules = []
self.handler.path = 'https://www.google.com/foo/bar?x=y'
self.handler.command = 'GET'
self.body = None
class AdminMixinTest(TestCase):
def test_get_requests(self):
self.handler.path = 'http://seleniumwire/requests'
self.mock_storage.load_requests.return_value = [
{'id': '12345'},
{'id': '67890'},
]
self.handler.admin_handler()
self.mock_storage.load_requests.assert_called_once_with()
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 34)],
body=b'[{"id": "12345"}, {"id": "67890"}]'
)
def test_delete_requests(self):
self.handler.path = 'http://seleniumwire/requests'
self.handler.command = 'DELETE'
self.handler.admin_handler()
self.mock_storage.clear_requests.assert_called_once_with()
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 16)],
body=b'{"status": "ok"}'
)
def test_get_last_request(self):
self.handler.path = 'http://seleniumwire/last_request'
self.mock_storage.load_last_request.return_value = {'id': '12345'}
self.handler.admin_handler()
self.mock_storage.load_last_request.assert_called_once_with()
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 15)],
body=b'{"id": "12345"}'
)
def test_get_request_body(self):
self.handler.path = 'http://seleniumwire/request_body?request_id=12345'
self.mock_storage.load_request_body.return_value = b'bodycontent'
self.handler.admin_handler()
self.mock_storage.load_request_body.assert_called_once_with('12345')
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/octet-stream'),
('Content-Length', 11)],
body=b'bodycontent'
)
def test_get_response_body(self):
self.handler.path = 'http://seleniumwire/response_body?request_id=12345'
self.mock_storage.load_response_body.return_value = b'bodycontent'
self.handler.admin_handler()
self.mock_storage.load_response_body.assert_called_once_with('12345')
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/octet-stream'),
('Content-Length', 11)],
body=b'bodycontent'
)
def test_get_response_body_string(self):
self.handler.path = 'http://seleniumwire/response_body?request_id=12345'
self.mock_storage.load_response_body.return_value = 'bodycontent'
self.handler.admin_handler()
self.mock_storage.load_response_body.assert_called_once_with('12345')
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/octet-stream'),
('Content-Length', 11)],
body=b'bodycontent'
)
def test_find(self):
self.handler.path = 'http://seleniumwire/find?path=/foo/bar'
self.mock_storage.find.return_value = {'id': '12345'}
self.handler.admin_handler()
self.mock_storage.find.assert_called_once_with('/foo/bar')
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 15)],
body=b'{"id": "12345"}'
)
def test_find_no_match(self):
self.handler.path = 'http://seleniumwire/find?path=/foo/bar'
self.mock_storage.find.return_value = None
self.handler.admin_handler()
self.mock_storage.find.assert_called_once_with('/foo/bar')
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 4)],
body=b'null'
)
def test_set_header_overrides(self):
self.handler.path = 'http://seleniumwire/header_overrides'
self.handler.command = 'POST'
self.handler.headers = {
'Content-Length': 20
}
self.mock_rfile.read.return_value = b'{"User-Agent": "useragent"}'
self.handler.admin_handler()
self.mock_rfile.read.assert_called_once_with(20)
self.assertEqual(self.mock_modifier.headers, {'User-Agent': 'useragent'})
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 16)],
body=b'{"status": "ok"}'
)
def test_delete_header_overrides(self):
self.handler.path = 'http://seleniumwire/header_overrides'
self.handler.command = 'DELETE'
self.mock_modifier.headers = {'User-Agent': 'useragent'}
self.handler.admin_handler()
self.assertFalse(hasattr(self.mock_modifier, 'headers'))
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 16)],
body=b'{"status": "ok"}'
)
def test_get_header_overrides(self):
self.handler.path = 'http://seleniumwire/header_overrides'
self.mock_modifier.headers = {'User-Agent': 'useragent'}
self.handler.admin_handler()
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 27)],
body=b'{"User-Agent": "useragent"}'
)
def test_set_rewrite_rules(self):
self.handler.path = 'http://seleniumwire/rewrite_rules'
self.handler.command = 'POST'
self.handler.headers = {
'Content-Length': 20
}
self.mock_rfile.read.return_value = b'[["https?://)prod1.server.com(.*)", "\\\\1prod2.server.com\\\\2"]]'
self.handler.admin_handler()
self.mock_rfile.read.assert_called_once_with(20)
self.assertEqual(self.mock_modifier.rewrite_rules,
[["https?://)prod1.server.com(.*)", r"\1prod2.server.com\2"]])
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 16)],
body=b'{"status": "ok"}'
)
def test_delete_rewrite_rules(self):
self.handler.path = 'http://seleniumwire/rewrite_rules'
self.handler.command = 'DELETE'
self.mock_modifier.rewrite_rules = [["https?://)prod1.server.com(.*)", r"\1prod2.server.com\2"]]
self.handler.admin_handler()
self.assertFalse(hasattr(self.mock_modifier, 'rewrite_rules'))
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 16)],
body=b'{"status": "ok"}'
)
def test_get_rewrite_rules(self):
self.handler.path = 'http://seleniumwire/rewrite_rules'
self.mock_modifier.rewrite_rules = [["https?://)prod1.server.com(.*)", r"\1prod2.server.com\2"]]
self.handler.admin_handler()
self.assert_response_mocks_called(
status=200,
headers=[('Content-Type', 'application/json'),
('Content-Length', 62)],
body=b'[["https?://)prod1.server.com(.*)", "\\\\1prod2.server.com\\\\2"]]'
)
def test_no_handler(self):
self.handler.path = 'http://seleniumwire/foobar'
with self.assertRaises(RuntimeError):
self.handler.admin_handler()
def assert_response_mocks_called(self, status, headers, body):
self.mock_send_response.assert_called_once_with(status)
self.mock_send_header.assert_has_calls([call(k, v) for k, v in headers])
self.mock_end_headers.assert_called_once_with()
self.mock_wfile.write.assert_called_once_with(body)
def setUp(self):
CaptureRequestHandler.__init__ = Mock(return_value=None)
self.mock_modifier = Mock()
self.mock_storage = Mock()
self.mock_send_response = Mock()
self.mock_send_header = Mock()
self.mock_end_headers = Mock()
self.mock_rfile = Mock()
self.mock_wfile = Mock()
self.handler = CaptureRequestHandler()
self.handler.server = Mock()
self.handler.server.modifier = self.mock_modifier
self.handler.server.storage = self.mock_storage
self.handler.server.options = {}
self.handler.command = 'GET'
self.handler.send_response = self.mock_send_response
self.handler.send_header = self.mock_send_header
self.handler.end_headers = self.mock_end_headers
self.handler.rfile = self.mock_rfile
self.handler.wfile = self.mock_wfile
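# A conventional entry point (not in the original file) so this module can be
# run directly with the standard library test runner.
import unittest

if __name__ == "__main__":
    unittest.main(verbosity=2)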
| 36.319648
| 113
| 0.628421
| 1,407
| 12,385
| 5.290689
| 0.092395
| 0.132993
| 0.050376
| 0.045674
| 0.833289
| 0.787883
| 0.756985
| 0.726222
| 0.709296
| 0.674234
| 0
| 0.019075
| 0.242309
| 12,385
| 340
| 114
| 36.426471
| 0.77419
| 0
| 0
| 0.566038
| 0
| 0
| 0.16633
| 0.023577
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0.10566
| false
| 0
| 0.015094
| 0
| 0.128302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e08e981fa285e98548fa0cb8f1ad50a694d8718b
| 17,386
|
py
|
Python
|
src/datadog_api_client/v1/api/dashboard_lists_api.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
src/datadog_api_client/v1/api/dashboard_lists_api.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
src/datadog_api_client/v1/api/dashboard_lists_api.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
import re # noqa: F401
import sys # noqa: F401
from datadog_api_client.v1.api_client import ApiClient, Endpoint as _Endpoint
from datadog_api_client.v1.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types,
)
from datadog_api_client.v1.model.api_error_response import APIErrorResponse
from datadog_api_client.v1.model.dashboard_list import DashboardList
from datadog_api_client.v1.model.dashboard_list_delete_response import DashboardListDeleteResponse
from datadog_api_client.v1.model.dashboard_list_list_response import DashboardListListResponse
class DashboardListsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self._create_dashboard_list_endpoint = _Endpoint(
settings={
"response_type": (DashboardList,),
"auth": ["apiKeyAuth", "appKeyAuth"],
"endpoint_path": "/api/v1/dashboard/lists/manual",
"operation_id": "create_dashboard_list",
"http_method": "POST",
"servers": None,
},
params_map={
"all": [
"body",
],
"required": [
"body",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"body": (DashboardList,),
},
"attribute_map": {},
"location_map": {
"body": "body",
},
"collection_format_map": {},
},
headers_map={"accept": ["application/json"], "content_type": ["application/json"]},
api_client=api_client,
)
self._delete_dashboard_list_endpoint = _Endpoint(
settings={
"response_type": (DashboardListDeleteResponse,),
"auth": ["apiKeyAuth", "appKeyAuth"],
"endpoint_path": "/api/v1/dashboard/lists/manual/{list_id}",
"operation_id": "delete_dashboard_list",
"http_method": "DELETE",
"servers": None,
},
params_map={
"all": [
"list_id",
],
"required": [
"list_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"list_id": (int,),
},
"attribute_map": {
"list_id": "list_id",
},
"location_map": {
"list_id": "path",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self._get_dashboard_list_endpoint = _Endpoint(
settings={
"response_type": (DashboardList,),
"auth": ["apiKeyAuth", "appKeyAuth"],
"endpoint_path": "/api/v1/dashboard/lists/manual/{list_id}",
"operation_id": "get_dashboard_list",
"http_method": "GET",
"servers": None,
},
params_map={
"all": [
"list_id",
],
"required": [
"list_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"list_id": (int,),
},
"attribute_map": {
"list_id": "list_id",
},
"location_map": {
"list_id": "path",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self._list_dashboard_lists_endpoint = _Endpoint(
settings={
"response_type": (DashboardListListResponse,),
"auth": ["apiKeyAuth", "appKeyAuth"],
"endpoint_path": "/api/v1/dashboard/lists/manual",
"operation_id": "list_dashboard_lists",
"http_method": "GET",
"servers": None,
},
params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {},
"attribute_map": {},
"location_map": {},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self._update_dashboard_list_endpoint = _Endpoint(
settings={
"response_type": (DashboardList,),
"auth": ["apiKeyAuth", "appKeyAuth"],
"endpoint_path": "/api/v1/dashboard/lists/manual/{list_id}",
"operation_id": "update_dashboard_list",
"http_method": "PUT",
"servers": None,
},
params_map={
"all": [
"list_id",
"body",
],
"required": [
"list_id",
"body",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"list_id": (int,),
"body": (DashboardList,),
},
"attribute_map": {
"list_id": "list_id",
},
"location_map": {
"list_id": "path",
"body": "body",
},
"collection_format_map": {},
},
headers_map={"accept": ["application/json"], "content_type": ["application/json"]},
api_client=api_client,
)
def create_dashboard_list(self, body, **kwargs):
"""Create a dashboard list # noqa: E501
Create an empty dashboard list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_dashboard_list(body, async_req=True)
>>> result = thread.get()
Args:
body (DashboardList): Create a dashboard list request body.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DashboardList
If the method is called asynchronously, returns the request
thread.
"""
kwargs = self._create_dashboard_list_endpoint.default_arguments(kwargs)
kwargs["body"] = body
return self._create_dashboard_list_endpoint.call_with_http_info(**kwargs)
def delete_dashboard_list(self, list_id, **kwargs):
"""Delete a dashboard list # noqa: E501
Delete a dashboard list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dashboard_list(list_id, async_req=True)
>>> result = thread.get()
Args:
list_id (int): ID of the dashboard list to delete.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DashboardListDeleteResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs = self._delete_dashboard_list_endpoint.default_arguments(kwargs)
kwargs["list_id"] = list_id
return self._delete_dashboard_list_endpoint.call_with_http_info(**kwargs)
def get_dashboard_list(self, list_id, **kwargs):
"""Get a dashboard list # noqa: E501
Fetch an existing dashboard list's definition. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_list(list_id, async_req=True)
>>> result = thread.get()
Args:
list_id (int): ID of the dashboard list to fetch.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DashboardList
If the method is called asynchronously, returns the request
thread.
"""
kwargs = self._get_dashboard_list_endpoint.default_arguments(kwargs)
kwargs["list_id"] = list_id
return self._get_dashboard_list_endpoint.call_with_http_info(**kwargs)
def list_dashboard_lists(self, **kwargs):
"""Get all dashboard lists # noqa: E501
Fetch all of your existing dashboard list definitions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_dashboard_lists(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DashboardListListResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs = self._list_dashboard_lists_endpoint.default_arguments(kwargs)
return self._list_dashboard_lists_endpoint.call_with_http_info(**kwargs)
def update_dashboard_list(self, list_id, body, **kwargs):
"""Update a dashboard list # noqa: E501
Update the name of a dashboard list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard_list(list_id, body, async_req=True)
>>> result = thread.get()
Args:
list_id (int): ID of the dashboard list to update.
body (DashboardList): Update a dashboard list request body.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
DashboardList
If the method is called asynchronously, returns the request
thread.
"""
kwargs = self._update_dashboard_list_endpoint.default_arguments(kwargs)
kwargs["list_id"] = list_id
kwargs["body"] = body
return self._update_dashboard_list_endpoint.call_with_http_info(**kwargs)
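# A minimal synchronous usage sketch (not in the original file), modeled on the
# docstring examples above. The Configuration import path is an assumption about
# the surrounding package; API and application keys are typically supplied via
# the DD_API_KEY and DD_APP_KEY environment variables or set on the configuration.
from datadog_api_client.v1 import Configuration  # assumed import location
from datadog_api_client.v1.api_client import ApiClient

configuration = Configuration()
with ApiClient(configuration) as api_client:
    api = DashboardListsApi(api_client)
    dashboard_lists = api.list_dashboard_lists()
    print(dashboard_lists)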
| 39.967816
| 108
| 0.542045
| 1,742
| 17,386
| 5.199196
| 0.118829
| 0.05885
| 0.028707
| 0.020978
| 0.838467
| 0.797946
| 0.778845
| 0.761179
| 0.733466
| 0.718229
| 0
| 0.005593
| 0.372656
| 17,386
| 434
| 109
| 40.059908
| 0.824791
| 0.447141
| 0
| 0.606335
| 0
| 0
| 0.204441
| 0.041999
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027149
| false
| 0
| 0.036199
| 0
| 0.090498
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e0c6ccfc76e44c0c949f867d2141b943c9659b92
| 3,540
|
py
|
Python
|
source/tests/test_id_to_arn_conversion.py
|
josete89/serverless-transit-network-orchestrator
|
6a69e1f7ebcbcba20100f80bc2040f0d2ae86cf8
|
[
"Apache-2.0"
] | 42
|
2019-11-16T18:00:32.000Z
|
2021-09-16T01:10:53.000Z
|
source/tests/test_id_to_arn_conversion.py
|
sukenshah/serverless-transit-network-orchestrator
|
947edac276f56357859f2d2b8434b9d28fa1c6c1
|
[
"Apache-2.0"
] | 38
|
2020-01-31T03:31:29.000Z
|
2021-09-16T03:20:23.000Z
|
source/tests/test_id_to_arn_conversion.py
|
sukenshah/serverless-transit-network-orchestrator
|
947edac276f56357859f2d2b8434b9d28fa1c6c1
|
[
"Apache-2.0"
] | 31
|
2019-12-09T17:20:03.000Z
|
2021-03-30T06:52:02.000Z
|
from lambda_custom_resource import create, update, delete
from lib.logger import Logger
import pytest
logger = Logger('info')
event = {
"RequestType": "Create",
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function:TransitNetworkOrchestratorCRLambda",
"ResponseURL": "https://cloudformation-custom-resource-response-euwest1.s3-eu-west-1.amazonaws.com/arn",
"StackId": "arn:aws:cloudformation:eu-west-1:999999:stack/StackSet-STNO-hub",
"RequestId": "3d249554-871e-4a25-a46c-d3a7027d3c2f",
"LogicalResourceId": "TestResourceId",
"ResourceType": "Custom::GetPrefixListArns",
"PhysicalResourceId": "80afcad19ddf557011f84a6004bcb96f",
"ResourceProperties": {
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function:TransitNetworkOrchestratorCRLambda",
"PrefixListIds": "pl-abcd1234, pl-efgh5678",
"AccountId": "999999"
}
}
event_with_extra_comma = {
"RequestType": "Create",
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function:TransitNetworkOrchestratorCRLambda",
"ResponseURL": "https://cloudformation-custom-resource-response-euwest1.s3-eu-west-1.amazonaws.com/arn",
"StackId": "arn:aws:cloudformation:eu-west-1:999999:stack/StackSet-STNO-hub",
"RequestId": "3d249554-871e-4a25-a46c-d3a7027d3c2f",
"LogicalResourceId": "TestResourceId",
"ResourceType": "Custom::GetPrefixListArns",
"PhysicalResourceId": "80afcad19ddf557011f84a6004bcb96f",
"ResourceProperties": {
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function:TransitNetworkOrchestratorCRLambda",
"PrefixListIds": "pl-abcd1234, pl-efgh5678",
"AccountId": "999999"
}
}
event_no_values = {
"RequestType": "Create",
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function:TransitNetworkOrchestratorCRLambda",
"ResponseURL": "https://cloudformation-custom-resource-response-euwest1.s3-eu-west-1.amazonaws.com/arn",
"StackId": "arn:aws:cloudformation:eu-west-1:999999:stack/StackSet-STNO-hub",
"RequestId": "3d249554-871e-4a25-a46c-d3a7027d3c2f",
"LogicalResourceId": "TestResourceId",
"ResourceType": "Custom::GetPrefixListArns",
"ResourceProperties": {
"ServiceToken": "arn:aws:lambda:eu-west-1:999999:function"
":TransitNetworkOrchestratorCRLambda",
"PrefixListIds": "",
"AccountId": "999999"
}
}
context = {}
def test_create_get_prefix_list_arns():
arn_list = create(event, context)
logger.info(arn_list)
for arn in arn_list[1].get('PrefixListArns'):
logger.info(arn)
assert arn.startswith('arn:aws:ec2')
def test_no_empty_string_in_prefix_list_arns():
arn_list = create(event_with_extra_comma, context)
logger.info(arn_list)
empty_string = ''
for arn in arn_list[1].get('PrefixListArns'):
logger.info(arn)
assert empty_string != arn.split('/')[1]
def test_create_empty_get_prefix_list_arns():
with pytest.raises(ValueError, match=r"STNO CFN Parameter Missing: You must"
r" provide at least one valid prefix "
r"list id."):
create(event_no_values, context)
def test_update_get_prefix_list_arns():
arn_list = update(event, context)
logger.info(arn_list)
for arn in arn_list[1].get('PrefixListArns'):
logger.info(arn)
assert arn.startswith('arn:aws:ec2')
def test_delete_get_prefix_list_arns():
response = delete(event, context)
assert response is None
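# For reference (not in the original file): a hypothetical sketch of the
# id-to-ARN shape these tests assert on. The region and account formatting is
# inferred from the fixtures above, not taken from the lambda's actual code.
def _to_prefix_list_arn(pl_id, region="eu-west-1", account_id="999999"):
    return "arn:aws:ec2:{}:{}:prefix-list/{}".format(region, account_id, pl_id.strip())

print(_to_prefix_list_arn("pl-abcd1234"))  # arn:aws:ec2:eu-west-1:999999:prefix-list/pl-abcd1234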
| 38.064516
| 108
| 0.692373
| 384
| 3,540
| 6.25
| 0.239583
| 0.03
| 0.035
| 0.04875
| 0.804167
| 0.794167
| 0.782917
| 0.758333
| 0.758333
| 0.758333
| 0
| 0.072839
| 0.170057
| 3,540
| 92
| 109
| 38.478261
| 0.744044
| 0
| 0
| 0.571429
| 0
| 0.077922
| 0.521751
| 0.250282
| 0
| 0
| 0
| 0
| 0.051948
| 1
| 0.064935
| false
| 0
| 0.038961
| 0
| 0.103896
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e0fe566a082446999b7f016d15862ae237f4a070
| 15,593
|
py
|
Python
|
instances/simulation/inst-20210422-1717/timetabling_closed-SI/inst-20210422-1717-c70-pas1_9h-3h.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/simulation/inst-20210422-1717/timetabling_closed-SI/inst-20210422-1717-c70-pas1_9h-3h.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/simulation/inst-20210422-1717/timetabling_closed-SI/inst-20210422-1717-c70-pas1_9h-3h.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PERIODS
"""
numPeriods = 180
"""
STOPS
"""
numStations = 13
station_names = (
"Hamburg Hbf", # 0
"Landwehr", # 1
"Hasselbrook", # 2
"Wansbeker Chaussee*", # 3
"Friedrichsberg*", # 4
"Barmbek*", # 5
"Alte Woehr (Stadtpark)", # 6
"Ruebenkamp (City Nord)", # 7
"Ohlsdorf*", # 8
"Kornweg", # 9
"Hoheneichen", # 10
"Wellingsbuettel", # 11
"Poppenbuettel*", # 12
)
numStops = 26
stops_position = (
(0, 0), # Stop 0
(2, 0), # Stop 1
(3, 0), # Stop 2
(4, 0), # Stop 3
(5, 0), # Stop 4
(6, 0), # Stop 5
(7, 0), # Stop 6
(8, 0), # Stop 7
(9, 0), # Stop 8
(11, 0), # Stop 9
(13, 0), # Stop 10
(14, 0), # Stop 11
(15, 0), # Stop 12
(15, 1), # Stop 13
(15, 1), # Stop 14
(13, 1), # Stop 15
(12, 1), # Stop 16
(11, 1), # Stop 17
(10, 1), # Stop 18
(9, 1), # Stop 19
(8, 1), # Stop 20
(7, 1), # Stop 21
(6, 1), # Stop 22
(4, 1), # Stop 23
(2, 1), # Stop 24
(1, 1), # Stop 25
)
stops_distance = (
(0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 0
(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 1
(0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 2
(0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 3
(0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 4
(0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 5
(0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 6
(0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 7
(0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 8
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 9
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 10
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 11
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 12
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 13
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 14
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 15
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 16
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0), # Stop 17
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0), # Stop 18
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0), # Stop 19
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0), # Stop 20
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0), # Stop 21
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0), # Stop 22
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0), # Stop 23
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2), # Stop 24
(1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 25
)
station_start = 0
"""
TRAMS
"""
numTrams = 18
tram_capacity = 514
tram_capacity_cargo = 304
tram_capacity_min_passenger = 208
tram_capacity_min_cargo = 0
tram_speed = 1
tram_headway = 1
tram_min_service = 1
tram_max_service = 10
min_time_next_tram = 0.333
tram_travel_deviation = 0.167
"""
PASSENGERS
"""
passenger_set = "pas-20210422-1717-int1"
passenger_service_time_board = 0.0145
passenger_service_time_alight = 0.0145
"""
CARGO
"""
numCargo = 70
cargo_size = 4
cargo_station_destination = (
8, # 0
4, # 1
8, # 2
5, # 3
8, # 4
3, # 5
4, # 6
5, # 7
3, # 8
12, # 9
12, # 10
5, # 11
5, # 12
3, # 13
4, # 14
5, # 15
5, # 16
8, # 17
5, # 18
5, # 19
8, # 20
5, # 21
12, # 22
8, # 23
4, # 24
5, # 25
8, # 26
5, # 27
12, # 28
3, # 29
4, # 30
12, # 31
12, # 32
4, # 33
3, # 34
4, # 35
12, # 36
12, # 37
5, # 38
12, # 39
3, # 40
5, # 41
5, # 42
4, # 43
4, # 44
3, # 45
12, # 46
5, # 47
8, # 48
3, # 49
5, # 50
3, # 51
8, # 52
12, # 53
4, # 54
8, # 55
8, # 56
3, # 57
3, # 58
12, # 59
8, # 60
3, # 61
3, # 62
8, # 63
4, # 64
12, # 65
12, # 66
5, # 67
3, # 68
4, # 69
)
cargo_release = (
2, # 0
3, # 1
3, # 2
5, # 3
6, # 4
6, # 5
7, # 6
8, # 7
8, # 8
9, # 9
9, # 10
10, # 11
12, # 12
13, # 13
14, # 14
15, # 15
16, # 16
17, # 17
18, # 18
21, # 19
22, # 20
24, # 21
25, # 22
26, # 23
27, # 24
28, # 25
28, # 26
30, # 27
32, # 28
33, # 29
33, # 30
34, # 31
35, # 32
37, # 33
37, # 34
37, # 35
37, # 36
38, # 37
39, # 38
41, # 39
41, # 40
43, # 41
44, # 42
45, # 43
46, # 44
46, # 45
47, # 46
48, # 47
49, # 48
49, # 49
51, # 50
55, # 51
56, # 52
57, # 53
61, # 54
61, # 55
61, # 56
62, # 57
64, # 58
64, # 59
65, # 60
65, # 61
66, # 62
66, # 63
67, # 64
70, # 65
70, # 66
71, # 67
71, # 68
72, # 69
)
cargo_station_deadline = (
33, # 0
119, # 1
119, # 2
176, # 3
123, # 4
59, # 5
72, # 6
171, # 7
18, # 8
90, # 9
175, # 10
142, # 11
88, # 12
84, # 13
105, # 14
170, # 15
155, # 16
156, # 17
140, # 18
173, # 19
123, # 20
126, # 21
91, # 22
36, # 23
87, # 24
127, # 25
144, # 26
134, # 27
141, # 28
163, # 29
101, # 30
108, # 31
144, # 32
47, # 33
162, # 34
76, # 35
175, # 36
97, # 37
87, # 38
164, # 39
114, # 40
143, # 41
142, # 42
55, # 43
56, # 44
56, # 45
57, # 46
118, # 47
160, # 48
59, # 49
112, # 50
168, # 51
170, # 52
139, # 53
71, # 54
71, # 55
71, # 56
82, # 57
135, # 58
90, # 59
109, # 60
161, # 61
151, # 62
128, # 63
77, # 64
80, # 65
169, # 66
97, # 67
129, # 68
99, # 69
)
cargo_max_delay = 3
cargo_service_time_load = 0.3333333333333333
cargo_service_time_unload = 0.25
"""
parameters for reproducibility. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 8991598675325360468762009371570610170
#index for seed sequence child
child_seed_index = (
0, # 0
)
"""
Results from timetabling
"""
scheme = "SI"
method = "timetabling_closed"
passengerData = "0-rep"
downstream_cargo = False
delivery_optional = False
assignment_method = "timetabling_closed"
operating = (
False, # 0
False, # 1
False, # 2
False, # 3
True, # 4
True, # 5
True, # 6
True, # 7
True, # 8
True, # 9
True, # 10
True, # 11
True, # 12
True, # 13
True, # 14
True, # 15
True, # 16
True, # 17
)
tram_tour = (
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 0
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 1
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 2
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 3
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 4
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 5
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 6
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 7
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 8
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 9
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 10
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 11
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 12
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 13
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 14
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 15
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 16
(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25), # 17
)
tram_time_arrival = (
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 0
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 1
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 2
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 3
(0.0, 3.0, 5.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 21.0, 24.0, 26.0, 28.0, 30.0, 32.0, 40.0, 43.0, 46.0, 50.0, 52.0, 54.0, 58.0, 60.0, 64.0, 70.0, 80.0), # 4
(2.0, 11.0, 13.0, 15.0, 17.0, 19.0, 22.0, 24.0, 27.0, 30.0, 33.0, 35.0, 37.0, 39.0, 41.0, 48.0, 60.0, 66.0, 71.0, 82.0, 84.0, 86.0, 88.0, 90.0, 92.0, 95.0), # 5
(17.0, 29.0, 31.0, 33.0, 35.0, 37.0, 39.0, 41.0, 43.0, 46.0, 52.0, 54.0, 61.0, 64.0, 72.0, 74.0, 77.0, 80.0, 82.0, 84.0, 86.0, 88.0, 90.0, 92.0, 94.0, 97.0), # 6
(33.0, 40.0, 42.0, 44.0, 46.0, 48.0, 50.0, 52.0, 56.0, 59.0, 64.0, 67.0, 69.0, 72.0, 74.0, 76.0, 79.0, 82.0, 84.0, 86.0, 88.0, 90.0, 92.0, 94.0, 96.0, 99.0), # 7
(39.0, 48.0, 50.0, 52.0, 54.0, 56.0, 58.0, 60.0, 62.0, 65.0, 68.0, 70.0, 72.0, 74.0, 77.0, 79.0, 82.0, 85.0, 87.0, 89.0, 91.0, 93.0, 95.0, 97.0, 99.0, 102.0), # 8
(47.0, 50.0, 52.0, 54.0, 56.0, 58.0, 60.0, 62.0, 64.0, 67.0, 70.0, 72.0, 74.0, 77.0, 79.0, 81.0, 84.0, 87.0, 89.0, 91.0, 93.0, 95.0, 97.0, 99.0, 101.0, 104.0), # 9
(49.0, 52.0, 56.0, 59.0, 64.0, 66.0, 68.0, 70.0, 72.0, 75.0, 78.0, 80.0, 82.0, 84.0, 86.0, 88.0, 91.0, 94.0, 96.0, 98.0, 100.0, 102.0, 105.0, 107.0, 109.0, 112.0), # 10
(53.0, 64.0, 66.0, 68.0, 70.0, 72.0, 74.0, 76.0, 78.0, 81.0, 84.0, 86.0, 88.0, 90.0, 92.0, 94.0, 97.0, 103.0, 105.0, 107.0, 109.0, 120.0, 131.0, 142.0, 145.0, 157.0), # 11
(63.0, 67.0, 69.0, 71.0, 73.0, 75.0, 77.0, 79.0, 81.0, 84.0, 87.0, 89.0, 91.0, 94.0, 96.0, 107.0, 113.0, 123.0, 127.0, 133.0, 138.0, 144.0, 151.0, 162.0, 164.0, 168.0), # 12
(66.0, 70.0, 72.0, 74.0, 76.0, 78.0, 80.0, 82.0, 84.0, 87.0, 90.0, 92.0, 95.0, 97.0, 108.0, 113.0, 122.0, 133.0, 136.0, 138.0, 149.0, 153.0, 162.0, 164.0, 167.0, 170.0), # 13
(69.0, 73.0, 75.0, 77.0, 79.0, 81.0, 83.0, 85.0, 87.0, 90.0, 93.0, 95.0, 97.0, 108.0, 113.0, 122.0, 132.0, 137.0, 142.0, 153.0, 155.0, 163.0, 165.0, 167.0, 169.0, 172.0), # 14
(72.0, 75.0, 77.0, 79.0, 81.0, 83.0, 85.0, 87.0, 89.0, 94.0, 97.0, 99.0, 109.0, 115.0, 126.0, 135.0, 145.0, 157.0, 159.0, 161.0, 163.0, 165.0, 167.0, 169.0, 171.0, 174.0), # 15
(74.0, 77.0, 79.0, 81.0, 83.0, 91.0, 95.0, 97.0, 99.0, 102.0, 114.0, 125.0, 136.0, 138.0, 147.0, 152.0, 156.0, 159.0, 161.0, 163.0, 165.0, 167.0, 169.0, 171.0, 173.0, 176.0), # 16
(76.0, 85.0, 87.0, 89.0, 91.0, 97.0, 100.0, 102.0, 113.0, 125.0, 133.0, 138.0, 148.0, 150.0, 152.0, 155.0, 158.0, 161.0, 163.0, 165.0, 167.0, 169.0, 171.0, 173.0, 175.0, 178.0), # 17
)
tram_time_departure = (
(-0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0, -0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0, -0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0, -0.0), # 0
(-0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0), # 1
(-0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0), # 2
(-0.0, -0.0, -0.0, -0.0, 0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, 0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0, -0.0), # 3
(1.0, 4.0, 7.0, 9.0, 11.0, 13.0, 15.0, 17.0, 19.0, 22.0, 25.0, 27.0, 29.0, 31.0, 39.0, 41.0, 44.0, 49.0, 51.0, 53.0, 57.0, 59.0, 63.0, 69.0, 78.0, 81.0), # 4
(9.0, 12.0, 14.0, 16.0, 18.0, 21.0, 23.0, 26.0, 28.0, 31.0, 34.0, 36.0, 38.0, 40.0, 47.0, 58.0, 64.0, 70.0, 81.0, 83.0, 85.0, 87.0, 89.0, 91.0, 93.0, 96.0), # 5
(27.0, 30.0, 32.0, 34.0, 36.0, 38.0, 40.0, 42.0, 44.0, 50.0, 53.0, 60.0, 63.0, 71.0, 73.0, 75.0, 78.0, 81.0, 83.0, 85.0, 87.0, 89.0, 91.0, 93.0, 95.0, 98.0), # 6
(38.0, 41.0, 43.0, 45.0, 47.0, 49.0, 51.0, 55.0, 57.0, 62.0, 66.0, 68.0, 71.0, 73.0, 75.0, 77.0, 80.0, 83.0, 85.0, 87.0, 89.0, 91.0, 93.0, 95.0, 97.0, 101.0), # 7
(46.0, 49.0, 51.0, 53.0, 55.0, 57.0, 59.0, 61.0, 63.0, 66.0, 69.0, 71.0, 73.0, 76.0, 78.0, 80.0, 83.0, 86.0, 88.0, 90.0, 92.0, 94.0, 96.0, 98.0, 100.0, 103.0), # 8
(48.0, 51.0, 53.0, 55.0, 57.0, 59.0, 61.0, 63.0, 65.0, 68.0, 71.0, 73.0, 76.0, 78.0, 80.0, 82.0, 85.0, 88.0, 90.0, 92.0, 94.0, 96.0, 98.0, 100.0, 102.0, 105.0), # 9
(50.0, 55.0, 58.0, 63.0, 65.0, 67.0, 69.0, 71.0, 73.0, 76.0, 79.0, 81.0, 83.0, 85.0, 87.0, 89.0, 92.0, 95.0, 97.0, 99.0, 101.0, 104.0, 106.0, 108.0, 110.0, 113.0), # 10
(62.0, 65.0, 67.0, 69.0, 71.0, 73.0, 75.0, 77.0, 79.0, 82.0, 85.0, 87.0, 89.0, 91.0, 93.0, 95.0, 101.0, 104.0, 106.0, 108.0, 119.0, 130.0, 141.0, 144.0, 155.0, 162.0), # 11
(65.0, 68.0, 70.0, 72.0, 74.0, 76.0, 78.0, 80.0, 82.0, 85.0, 88.0, 90.0, 93.0, 95.0, 106.0, 111.0, 121.0, 126.0, 132.0, 137.0, 143.0, 150.0, 161.0, 163.0, 166.0, 169.0), # 12
(68.0, 71.0, 73.0, 75.0, 77.0, 79.0, 81.0, 83.0, 85.0, 88.0, 91.0, 94.0, 96.0, 107.0, 112.0, 120.0, 131.0, 135.0, 137.0, 148.0, 152.0, 161.0, 163.0, 166.0, 168.0, 171.0), # 13
(71.0, 74.0, 76.0, 78.0, 80.0, 82.0, 84.0, 86.0, 88.0, 91.0, 94.0, 96.0, 107.0, 112.0, 121.0, 130.0, 135.0, 141.0, 152.0, 154.0, 162.0, 164.0, 166.0, 168.0, 170.0, 173.0), # 14
(73.0, 76.0, 78.0, 80.0, 82.0, 84.0, 86.0, 88.0, 92.0, 95.0, 98.0, 108.0, 114.0, 125.0, 134.0, 143.0, 155.0, 158.0, 160.0, 162.0, 164.0, 166.0, 168.0, 170.0, 172.0, 175.0), # 15
(75.0, 78.0, 80.0, 82.0, 90.0, 94.0, 96.0, 98.0, 100.0, 112.0, 124.0, 135.0, 137.0, 146.0, 151.0, 154.0, 157.0, 160.0, 162.0, 164.0, 166.0, 168.0, 170.0, 172.0, 174.0, 177.0), # 16
(83.0, 86.0, 88.0, 90.0, 96.0, 99.0, 101.0, 112.0, 123.0, 131.0, 137.0, 147.0, 149.0, 151.0, 154.0, 156.0, 159.0, 162.0, 164.0, 166.0, 168.0, 170.0, 172.0, 174.0, 176.0, 179.0), # 17
)
cargo_tram_assignment = (
5, # 0
5, # 1
7, # 2
5, # 3
7, # 4
5, # 5
7, # 6
5, # 7
5, # 8
7, # 9
7, # 10
7, # 11
7, # 12
6, # 13
8, # 14
8, # 15
6, # 16
6, # 17
6, # 18
6, # 19
7, # 20
6, # 21
6, # 22
6, # 23
11, # 24
7, # 25
8, # 26
8, # 27
7, # 28
8, # 29
7, # 30
14, # 31
11, # 32
7, # 33
8, # 34
7, # 35
7, # 36
10, # 37
11, # 38
17, # 39
8, # 40
11, # 41
8, # 42
8, # 43
9, # 44
9, # 45
9, # 46
11, # 47
11, # 48
10, # 49
12, # 50
12, # 51
11, # 52
11, # 53
11, # 54
11, # 55
11, # 56
12, # 57
12, # 58
12, # 59
13, # 60
13, # 61
13, # 62
13, # 63
13, # 64
14, # 65
16, # 66
17, # 67
17, # 68
17, # 69
)
| 28.611009
| 184
| 0.447252
| 4,026
| 15,593
| 1.717586
| 0.071038
| 0.294143
| 0.416052
| 0.524078
| 0.565148
| 0.510774
| 0.510195
| 0.487925
| 0.483008
| 0.459002
| 0
| 0.502091
| 0.279356
| 15,593
| 544
| 185
| 28.663603
| 0.113286
| 0.116783
| 0
| 0.555085
| 0
| 0
| 0.018038
| 0.001674
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.010593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1ca30ded96ee15c3bb71a7f8f6bbc8217f9c6432
| 39
|
py
|
Python
|
skquantreg/__init__.py
|
Marco-Santoni/skquantreg
|
178e162d317c97bc14d8b13e038f25cc4dce3026
|
[
"BSD-3-Clause"
] | 1
|
2018-09-28T13:59:48.000Z
|
2018-09-28T13:59:48.000Z
|
skquantreg/__init__.py
|
Marco-Santoni/skquantreg
|
178e162d317c97bc14d8b13e038f25cc4dce3026
|
[
"BSD-3-Clause"
] | 1
|
2018-09-28T14:01:49.000Z
|
2018-09-28T14:01:49.000Z
|
skquantreg/__init__.py
|
Marco-Santoni/skquantreg
|
178e162d317c97bc14d8b13e038f25cc4dce3026
|
[
"BSD-3-Clause"
] | null | null | null |
from .quantreg import QuantileRegressor
| 39
| 39
| 0.897436
| 4
| 39
| 8.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1cad3446ed80ac4e363ddd6243c557be2c021f15
| 7,905
|
py
|
Python
|
openprocurement/audit/api/tests/test_utils.py
|
raccoongang/openprocurement.audit.api
|
6e216b1f9dab339943b78c50d9257739238b9495
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/audit/api/tests/test_utils.py
|
raccoongang/openprocurement.audit.api
|
6e216b1f9dab339943b78c50d9257739238b9495
|
[
"Apache-2.0"
] | 23
|
2018-03-29T09:34:27.000Z
|
2018-07-20T06:03:29.000Z
|
openprocurement/audit/api/tests/test_utils.py
|
raccoongang/openprocurement.audit.api
|
6e216b1f9dab339943b78c50d9257739238b9495
|
[
"Apache-2.0"
] | 1
|
2018-05-11T06:06:17.000Z
|
2018-05-11T06:06:17.000Z
|
import unittest
import mock
from datetime import datetime, timedelta
from openprocurement.api.constants import TZ
from openprocurement.audit.api.utils import (
calculate_business_date,
get_access_token,
get_monitoring_accelerator,
calculate_normalized_business_date,
calculate_normalized_date,
)
class CalculateBusinessDateTests(unittest.TestCase):
"""
Test calendar:
2018-01-01 - Mon (holiday)
2018-01-02 - Tue
2018-01-03 - Wed
2018-01-04 - Thu
2018-01-05 - Fri (holiday)
2018-01-06 - Sat (weekend)
2018-01-07 - Sun (weekend)
"""
working_days_mock = {
'2018-01-01': True,
'2018-01-05': True,
}
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_non_working(self):
date = datetime(2018, 1, 1, 12, 0, 0, tzinfo=TZ)
result = calculate_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 3, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_non_working_before_non_working(self):
date = datetime(2017, 12, 31, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 3, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 3, 12, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_before_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_business_date(date, timedelta(days=2), working_days=True)
self.assertEqual(result, datetime(2018, 1, 4, 12, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_on_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_business_date(date, timedelta(days=3), working_days=True)
self.assertEqual(result, datetime(2018, 1, 8, 12, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_after_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_business_date(date, timedelta(days=4), working_days=True)
self.assertEqual(result, datetime(2018, 1, 9, 12, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.audit.api.utils.calculate_business_date_base')
def test_days_calculation_base_call(self, base_calculate_mock):
date = datetime.now()
result = calculate_business_date(date, timedelta(days=10), working_days=False)
base_calculate_mock.assert_called_once_with(date, timedelta(days=10), working_days=False)
self.assertEqual(result, base_calculate_mock.return_value)
def test_accelerator(self):
date = datetime.now()
result = calculate_business_date(date, timedelta(days=10), accelerator=2)
self.assertEqual(result, date + timedelta(days=10/2))
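# --- Editor's illustration (assumption, not part of openprocurement.audit) ---
# The accelerator assertion above pins down simple semantics: accelerator=N
# divides the requested period by N before it is added to the start date.
# A minimal standalone equivalent, using only the stdlib imports at the top:
def _example_accelerated_date(start, period, accelerator):
    # e.g. _example_accelerated_date(datetime(2018, 1, 1), timedelta(days=10), 2)
    # returns datetime(2018, 1, 6): the 10-day period is halved to 5 days.
    return start + period / accelerator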
class CalculateNormalizedBusinessDateTests(unittest.TestCase):
"""
Test calendar:
2018-01-01 - Mon (holiday)
2018-01-02 - Tue
2018-01-03 - Wed
2018-01-04 - Thu
2018-01-05 - Fri (holiday)
2018-01-06 - Sat (weekend)
2018-01-07 - Sun (weekend)
"""
working_days_mock = {
'2018-01-01': True,
'2018-01-05': True,
}
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_non_working(self):
date = datetime(2018, 1, 1, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 3, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_non_working_before_non_working(self):
date = datetime(2017, 12, 31, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 3, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_starts_on_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=1), working_days=True)
self.assertEqual(result, datetime(2018, 1, 4, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_before_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=2), working_days=True)
self.assertEqual(result, datetime(2018, 1, 5, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_on_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=3), working_days=True)
self.assertEqual(result, datetime(2018, 1, 9, 0, 0, 0, tzinfo=TZ))
@mock.patch('openprocurement.tender.core.utils.WORKING_DAYS', working_days_mock)
def test_ends_after_non_working(self):
date = datetime(2018, 1, 2, 12, 0, 0, tzinfo=TZ)
result = calculate_normalized_business_date(date, timedelta(days=4), working_days=True)
self.assertEqual(result, datetime(2018, 1, 10, 0, 0, 0, tzinfo=TZ))
class TestGetMonitoringAccelerator(unittest.TestCase):
def test_acceleration_value(self):
self.assertEqual(get_monitoring_accelerator({'monitoringDetails': 'accelerator=2'}), 2)
def test_no_acceleration(self):
self.assertEqual(get_monitoring_accelerator({}), 0)
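# --- Editor's illustration (assumption, not the real audit utils code) ---
# The two cases above imply that get_monitoring_accelerator() extracts an
# "accelerator=N" token from monitoringDetails and falls back to 0 when it
# is absent. A minimal parser honouring that contract could look like:
def _example_get_accelerator(monitoring):
    details = monitoring.get('monitoringDetails', '')
    for token in details.split(','):
        if token.strip().startswith('accelerator='):
            return int(token.strip().split('=', 1)[1])
    return 0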
class GetAccessTokenTests(unittest.TestCase):
def test_token_query_param(self):
self.assertEqual(get_access_token(
request=mock.Mock(params={'acc_token': 'test_token'})),
'test_token'
)
def test_token_headers(self):
self.assertEqual(get_access_token(
request=mock.Mock(params={}, headers={'X-Access-Token': 'test_token'})),
'test_token'
)
def test_token_body(self):
request = mock.Mock(
method='POST',
content_type='application/json',
params={},
headers={},
json_body={
'access': {
'token': 'test_token'
}
}
)
self.assertEqual(get_access_token(request=request), 'test_token')
def test_no_token(self):
request = mock.Mock(
method='POST',
content_type='application/json',
params={},
headers={},
json_body={}
)
self.assertRaises(ValueError, get_access_token, request=request)
class CalculateNormalizedDateTest(unittest.TestCase):
def test_calculate_ceil(self):
self.assertEqual(
calculate_normalized_date(datetime(2018, 1, 1, 12, 0, 0, tzinfo=TZ), ceil=True),
datetime(2018, 1, 2, 0, 0, 0, tzinfo=TZ)
)
def test_calculate_no_ceil(self):
self.assertEqual(
calculate_normalized_date(datetime(2018, 1, 1, 12, 0, 0, tzinfo=TZ), ceil=False),
datetime(2018, 1, 1, 0, 0, 0, tzinfo=TZ)
)
| 40.126904
| 97
| 0.665149
| 1,037
| 7,905
| 4.863067
| 0.103182
| 0.08725
| 0.044418
| 0.055523
| 0.806068
| 0.792584
| 0.768392
| 0.758279
| 0.73726
| 0.73726
| 0
| 0.071291
| 0.212144
| 7,905
| 196
| 98
| 40.331633
| 0.738439
| 0.05098
| 0
| 0.5
| 0
| 0
| 0.110047
| 0.082535
| 0
| 0
| 0
| 0
| 0.164286
| 1
| 0.157143
| false
| 0
| 0.035714
| 0
| 0.242857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e809daa3c4b0804bc9f7014d3f664cfb3fd5fcbe
| 8,445
|
py
|
Python
|
tests/test_specials_bucketers.py
|
orchardbirds/skorecard-1
|
0f5375a6c159bb35f4b62c5be75a742bf50885e2
|
[
"MIT"
] | null | null | null |
tests/test_specials_bucketers.py
|
orchardbirds/skorecard-1
|
0f5375a6c159bb35f4b62c5be75a742bf50885e2
|
[
"MIT"
] | null | null | null |
tests/test_specials_bucketers.py
|
orchardbirds/skorecard-1
|
0f5375a6c159bb35f4b62c5be75a742bf50885e2
|
[
"MIT"
] | null | null | null |
from skorecard import datasets
from skorecard.bucketers import DecisionTreeBucketer, EqualFrequencyBucketer, EqualWidthBucketer, OptimalBucketer
import numpy as np
import pytest
@pytest.fixture()
def df():
"""Generate dataframe."""
return datasets.load_uci_credit_card(as_frame=True)
def test_specials_tree_bucketer(df):
"""Test that when adding specials,the binner performs as expected.
Context: special values should be binned in their own bin.
"""
X = df[["LIMIT_BAL", "BILL_AMT1"]]
y = df["default"]
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}}
# Because 2 special buckets are defined, the decision tree
# will be fitted with max_leaf_nodes=1. This will create a crash in the sklearn implementation.
# In this case, Skorecard raises an exception with a clear recommendation to the user when the fit method is called.
tbt = DecisionTreeBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], random_state=1, max_n_bins=3, specials=specials)
with pytest.raises(ValueError):
tbt.fit_transform(X, y)
tbt = DecisionTreeBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], random_state=1, max_n_bins=5, specials=specials)
X_bins = tbt.fit_transform(X, y)
assert X_bins["BILL_AMT1"].nunique() == 5
assert X_bins["LIMIT_BAL"].nunique() == 5
assert X_bins[X["LIMIT_BAL"] == 50000]["LIMIT_BAL"].unique() == np.array(4)
# Test that the labels are properly assigned. Because there are no specials in BILL_AMT1, there should be no extra
# bins
assert len(tbt.features_bucket_mapping_["BILL_AMT1"].labels) == 6
# check that the last label finishes with inf
assert tbt.features_bucket_mapping_["BILL_AMT1"].labels[0].startswith("(-inf")
assert tbt.features_bucket_mapping_["BILL_AMT1"].labels[4].endswith("inf)")
# Test that the labels are properly assigned. Because there are 2 specials in LIMIT_BAL, there should be 2 extra
# bins
assert len(tbt.features_bucket_mapping_["LIMIT_BAL"].labels) == 6
# check that the labels match the specials dictionary
assert (
tbt.features_bucket_mapping_["LIMIT_BAL"].labels[4].endswith([key for key in specials["LIMIT_BAL"].keys()][0])
)
assert (
tbt.features_bucket_mapping_["LIMIT_BAL"].labels[5].endswith([key for key in specials["LIMIT_BAL"].keys()][1])
)
# Assert a value error is raised if the specials contains features not defined in the bucketer.
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}, "Undefinedfeature": {"1": [2]}}
with pytest.raises(ValueError):
DecisionTreeBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], random_state=1, max_n_bins=3, specials=specials).fit_transform(X, y)
def test_specials_equal_width_bucketer(df):
"""Test that when adding specials,the binner performs as expected.
Context: special values should be binned in their own bin.
"""
X = df[["LIMIT_BAL", "BILL_AMT1"]]
y = df["default"]
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}}
ebt = EqualWidthBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], n_bins=3, specials=specials)
X_bins = ebt.fit_transform(X, y)
assert X_bins["BILL_AMT1"].nunique() == 3
assert X_bins["LIMIT_BAL"].nunique() == 5 # maximum n_bins +2 coming from the specials
assert X_bins[X["LIMIT_BAL"] == 50000]["LIMIT_BAL"].unique() == np.array(4)
# Test that the labels are properly assigned. Because there are no specials in BILL_AMT1, there should be no extra
# bins
assert len(ebt.features_bucket_mapping_["BILL_AMT1"].labels) == 4
# check that the last label finishes with inf
assert ebt.features_bucket_mapping_["BILL_AMT1"].labels[0].startswith("(-inf")
assert ebt.features_bucket_mapping_["BILL_AMT1"].labels[2].endswith("inf)")
# Test that the labels are properly assigned. Because there are 2 specials in LIMIT_BAL, there should be 2 extra
# bins
assert len(ebt.features_bucket_mapping_["LIMIT_BAL"].labels) == 6
# check that the labels match the specials dictionary
assert (
ebt.features_bucket_mapping_["LIMIT_BAL"].labels[4].endswith([key for key in specials["LIMIT_BAL"].keys()][0])
)
assert (
ebt.features_bucket_mapping_["LIMIT_BAL"].labels[5].endswith([key for key in specials["LIMIT_BAL"].keys()][1])
)
# Assert a value error is raised if the specials contains features not defined in the bucketer.
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}, "Undefinedfeature": {"1": [2]}}
with pytest.raises(ValueError):
EqualWidthBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], n_bins=3, specials=specials).fit_transform(X, y)
def test_specials_equal_frequency_bucketer(df):
"""Test that when adding specials,the binner performs as expected.
Context: special values should be binned in their own bin.
"""
X = df[["LIMIT_BAL", "BILL_AMT1"]]
y = df["default"]
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}}
ebt = EqualFrequencyBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], n_bins=3, specials=specials)
X_bins = ebt.fit_transform(X, y)
assert X_bins["BILL_AMT1"].nunique() == 3
assert X_bins["LIMIT_BAL"].nunique() == 5 # maximum n_bins +2 coming from the specials
assert X_bins[X["LIMIT_BAL"] == 50000]["LIMIT_BAL"].unique() == np.array(4)
# Test that the labels are properly assigned. Because there are no specials in BILL_AMT1, there should be no extra
# bins
assert len(ebt.features_bucket_mapping_["BILL_AMT1"].labels) == 4
# check that the last label finishes with inf
assert ebt.features_bucket_mapping_["BILL_AMT1"].labels[0].startswith("(-inf")
assert ebt.features_bucket_mapping_["BILL_AMT1"].labels[2].endswith("inf)")
# Test that the labels are properly assigned. Because there are 2 specials in LIMIT_BAL, there should be 2 extra
# bins
assert len(ebt.features_bucket_mapping_["LIMIT_BAL"].labels) == 6
# check that the labels match the specials dictionary
assert (
ebt.features_bucket_mapping_["LIMIT_BAL"].labels[4].endswith([key for key in specials["LIMIT_BAL"].keys()][0])
)
assert (
ebt.features_bucket_mapping_["LIMIT_BAL"].labels[5].endswith([key for key in specials["LIMIT_BAL"].keys()][1])
)
# Assert a value error is raised if the specials contains features not defined in the bucketer.
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}, "Undefinedfeature": {"1": [2]}}
with pytest.raises(ValueError):
EqualFrequencyBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], n_bins=3, specials=specials).fit_transform(X, y)
def _test_specials_optimal_bucketer(df):
"""Test that when adding specials,the binner performs as expected.
Context: special values should be binned in their own bin.
"""
X = df[["LIMIT_BAL", "BILL_AMT1"]]
y = df["default"]
specials = {"LIMIT_BAL": {"=50000": [50000], "in [20001,30000]": [20000, 30000]}}
opt = OptimalBucketer(variables=["LIMIT_BAL", "BILL_AMT1"], random_state=1, max_n_bins=3, specials=specials)
X_bins = opt.fit_transform(X, y)
assert X_bins["BILL_AMT1"].nunique() == 3
assert X_bins["LIMIT_BAL"].nunique() == 5 # maximum n_bins +2 coming from the specials
assert X_bins[X["LIMIT_BAL"] == 50000]["LIMIT_BAL"].unique() == np.array(3)
# Test that the labels are properly assigned. Because there are no specials in BILL_AMT1, there should be no extra
# bins
assert len(opt.features_bucket_mapping_["BILL_AMT1"].labels) == 3
# check that the last label finishes with inf
assert opt.features_bucket_mapping_["BILL_AMT1"].labels[0].startswith("(-inf")
assert opt.features_bucket_mapping_["BILL_AMT1"].labels[2].endswith("inf)")
# Test that the labels are properly assigned. Because there are 2 specials in LIMIT_BAL, there should be 2 extra
# bins
assert len(opt.features_bucket_mapping_["LIMIT_BAL"].labels) == 5
# check that the labels match the specials dictionary
assert (
opt.features_bucket_mapping_["LIMIT_BAL"].labels[3].endswith([key for key in specials["LIMIT_BAL"].keys()][0])
)
assert (
opt.features_bucket_mapping_["LIMIT_BAL"].labels[4].endswith([key for key in specials["LIMIT_BAL"].keys()][1])
)
| 45.648649
| 135
| 0.69852
| 1,202
| 8,445
| 4.72213
| 0.118968
| 0.077519
| 0.088795
| 0.033827
| 0.901163
| 0.897287
| 0.897287
| 0.883897
| 0.862403
| 0.830514
| 0
| 0.04806
| 0.169686
| 8,445
| 184
| 136
| 45.896739
| 0.761409
| 0.297573
| 0
| 0.536842
| 0
| 0
| 0.167866
| 0
| 0
| 0
| 0
| 0
| 0.378947
| 1
| 0.052632
| false
| 0
| 0.042105
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1c02feb4945764fc32134883b01e3671483f6906
| 2,265
|
py
|
Python
|
tests/molecular/molecules/molecule/fixtures/cof/periodic_square.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
tests/molecular/molecules/molecule/fixtures/cof/periodic_square.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
tests/molecular/molecules/molecule/fixtures/cof/periodic_square.py
|
stevenbennett96/stk
|
6e5af87625b83e0bfc7243bc42d8c7a860cbeb76
|
[
"MIT"
] | null | null | null |
import pytest
import stk
from ...case_data import CaseData
@pytest.fixture(
scope='session',
params=(
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicSquare(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles='BrC1=C(Br)C(F)(Br)[C+]1Br',
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
),
),
smiles=(
'FC12C3=C(N=[C+]3)C3=C4C5=C([C+]=N5)[C+]5C6=C7C8=C([C+'
']=N8)[C+]3C4(F)C3=C(N=[C+]3)C3=C1C1=C([C+]=N1)[C+]1C('
'=C(C4=C([C+]=N4)[C+]32)C1(F)C1=C6N=[C+]1)C1=C([C+]=N1'
')C75F'
),
name=name,
),
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicSquare(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles='BrC1=C(Br)C(F)(Br)[C+]1Br',
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
optimizer=stk.PeriodicCollapser(),
),
),
smiles=(
'FC12C3=C(N=[C+]3)C3=C4C5=C([C+]=N5)[C+]5C6=C7C8=C([C+'
']=N8)[C+]3C4(F)C3=C(N=[C+]3)C3=C1C1=C([C+]=N1)[C+]1C('
'=C(C4=C([C+]=N4)[C+]32)C1(F)C1=C6N=[C+]1)C1=C([C+]=N1'
')C75F'
),
name=name,
),
),
)
def cof_periodic_square(request) -> CaseData:
return request.param(
f'{request.fixturename}{request.param_index}',
)
| 34.318182
| 71
| 0.392936
| 226
| 2,265
| 3.876106
| 0.283186
| 0.022831
| 0.100457
| 0.118721
| 0.776256
| 0.776256
| 0.776256
| 0.776256
| 0.776256
| 0.776256
| 0
| 0.065339
| 0.445916
| 2,265
| 65
| 72
| 34.846154
| 0.632669
| 0
| 0
| 0.803279
| 0
| 0.098361
| 0.203532
| 0.181015
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.04918
| 0.016393
| 0.081967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1c30dde957ec6ec2c8428b4859b69bf5c1bf83fb
| 73
|
py
|
Python
|
acousticsim/representations/mhec.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | 5
|
2018-01-15T22:06:20.000Z
|
2022-02-21T07:02:40.000Z
|
acousticsim/representations/mhec.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | null | null | null |
acousticsim/representations/mhec.py
|
JoFrhwld/python-acoustic-similarity
|
50f71835532010b2fedf14b0ca3a52d88a9ab380
|
[
"MIT"
] | 2
|
2019-11-28T17:06:27.000Z
|
2019-12-05T22:57:28.000Z
|
from .base import Representation
class Mhec(Representation):
pass
| 10.428571
| 32
| 0.753425
| 8
| 73
| 6.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191781
| 73
| 6
| 33
| 12.166667
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
1c4884286d5cca9042d1e66e455a44b672e6671c
| 2,340
|
py
|
Python
|
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | 4
|
2021-04-08T14:14:04.000Z
|
2021-09-08T07:57:38.000Z
|
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | null | null | null |
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | null | null | null |
import pytest
from django.conf import settings as _settings
from django.test import override_settings
from logging_middleware.settings import DEFAULTS, Settings
@override_settings(DJANGO_LOGGING_MIDDLEWARE={})
def test_check_settings_if_user_didnt_set_settings():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_true_and_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_false_and_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == False
assert settings.MESSAGE_FORMAT == '{message}'
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False})
def test_check_settings_if_user_set_default_format_false_but_didnt_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == False
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True})
def test_check_settings_if_user_set_default_format_true_and_didnt_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_didnt_set_default_format_but_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': ''})
def test_check_settings_if_user_set_wrong_data():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
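# --- Editor's illustration (assumption, not the middleware's Settings class) ---
# Taken together, the tests above pin down one rule: user-supplied values are
# honoured only when DEFAULT_FORMAT is explicitly False; anything else
# (missing, True, or a non-boolean such as '') falls back to DEFAULTS for both
# keys. A minimal sketch of that rule over a plain dict of user settings:
class _ExampleSettings(object):
    def __init__(self, user):
        use_defaults = user.get('DEFAULT_FORMAT') is not False
        self.DEFAULT_FORMAT = DEFAULTS['DEFAULT_FORMAT'] if use_defaults else False
        self.MESSAGE_FORMAT = (DEFAULTS['MESSAGE_FORMAT'] if use_defaults
                               else user.get('MESSAGE_FORMAT', DEFAULTS['MESSAGE_FORMAT']))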
| 48.75
| 102
| 0.814103
| 283
| 2,340
| 6.236749
| 0.09894
| 0.16204
| 0.108782
| 0.115014
| 0.898017
| 0.875921
| 0.875921
| 0.85949
| 0.800567
| 0.774504
| 0
| 0
| 0.092735
| 2,340
| 47
| 103
| 49.787234
| 0.831371
| 0
| 0
| 0.512821
| 0
| 0
| 0.12906
| 0
| 0
| 0
| 0
| 0
| 0.358974
| 1
| 0.179487
| false
| 0
| 0.102564
| 0
| 0.282051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
98ca18d5b9fd51cb147f999cc6268f38eb92f61b
| 126,501
|
py
|
Python
|
src/config/vnc_openstack/vnc_openstack/tests/test_basic.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
src/config/vnc_openstack/vnc_openstack/tests/test_basic.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
src/config/vnc_openstack/vnc_openstack/tests/test_basic.py
|
pawelzny/contrail-controller
|
4950d3144cb8c422264ddb2a926cf4fe9e40b14d
|
[
"Apache-2.0"
] | null | null | null |
import sys
import json
import re
import mock
import unittest
from testtools import ExpectedException
import webtest.app
import datetime
sys.path.append('../common/tests')
from vnc_openstack import neutron_plugin_db
from cfgm_common.exceptions import NoIdError
from cfgm_common import PERMS_RWX, PERMS_NONE, PERMS_RX
from test_utils import *
import test_common
import requests
import test_case
try:
from neutron_lib import constants
except ImportError:
from neutron.common import constants
_IFACE_ROUTE_TABLE_NAME_PREFIX = 'NEUTRON_IFACE_RT'
class TestBasic(test_case.NeutronBackendTestCase):
@classmethod
def setUpClass(cls):
super(TestBasic, cls).setUpClass(
extra_config_knobs=[
('DEFAULTS', 'apply_subnet_host_routes', True)
])
def test_list_with_inconsistent_members(self):
# 1. create collection
# 2. list, verify full collection
# 3. mess with one in vnc_to_neutron, verify collection-1
# 4. restore, list, verify full collection
proj_obj = self._vnc_lib.project_read(
fq_name=['default-domain', 'default-project'])
objects = {}
for (obj_type, obj_class, create_method_name) in \
[('virtual_network', vnc_api.VirtualNetwork,
'virtual_network_create'),
('network_ipam', vnc_api.NetworkIpam,
'network_ipam_create'),
('network_policy', vnc_api.NetworkPolicy,
'network_policy_create'),
('logical_router', vnc_api.LogicalRouter,
'logical_router_create'),
('security_group', vnc_api.SecurityGroup,
'security_group_create'),
('route_table', vnc_api.RouteTable,
'route_table_create'),
('service_instance', vnc_api.ServiceInstance,
'service_instance_create')]:
objects[obj_type] = [obj_class('%s-%s' %(self.id(), i))
for i in range(3)]
for obj in objects[obj_type]:
create_method = getattr(self._vnc_lib, create_method_name)
create_method(obj)
objects['virtual_machine_interface'] = \
[vnc_api.VirtualMachineInterface('%s-%s' %(self.id(), i), proj_obj)
for i in range(3)]
for obj in objects['virtual_machine_interface']:
obj.add_virtual_network(vnc_api.VirtualNetwork())
self._vnc_lib.virtual_machine_interface_create(obj)
vn_obj = vnc_api.VirtualNetwork(self.id())
sn0_id = str(uuid.uuid4())
sn1_id = str(uuid.uuid4())
sn2_id = str(uuid.uuid4())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(vnc_api.SubnetType('1.1.1.0', 28),
subnet_uuid=sn0_id),
vnc_api.IpamSubnetType(vnc_api.SubnetType('2.2.2.0', 28),
subnet_uuid=sn1_id),
vnc_api.IpamSubnetType(vnc_api.SubnetType('3.3.3.0', 28),
subnet_uuid=sn2_id)]))
self._vnc_lib.virtual_network_create(vn_obj)
fip_pool_obj = vnc_api.FloatingIpPool(self.id(), vn_obj)
self._vnc_lib.floating_ip_pool_create(fip_pool_obj)
objects['floating_ip'] = [vnc_api.FloatingIp('%s-%s' %(self.id(), i),
fip_pool_obj)
for i in range(3)]
for obj in objects['floating_ip']:
obj.add_project(proj_obj)
self._vnc_lib.floating_ip_create(obj)
collection_types = [
(objects['virtual_network'], 'network',
'_network_vnc_to_neutron'),
(objects['virtual_machine_interface'], 'port',
'_port_vnc_to_neutron'),
(objects['network_ipam'], 'ipam',
'_ipam_vnc_to_neutron'),
(objects['network_policy'], 'policy',
'_policy_vnc_to_neutron'),
(objects['logical_router'], 'router',
'_router_vnc_to_neutron'),
(objects['floating_ip'], 'floatingip',
'_floatingip_vnc_to_neutron'),
(objects['security_group'], 'security_group',
'_security_group_vnc_to_neutron'),
(objects['route_table'], 'route_table',
'_route_table_vnc_to_neutron'),
(objects['service_instance'], 'nat_instance',
'_svc_instance_vnc_to_neutron'),
]
# for collections that are objects in contrail model
for (objs, res_url_pfx, res_xlate_name) in collection_types:
res_dicts = self.list_resource(res_url_pfx)
present_ids = [r['id'] for r in res_dicts]
for obj in objs:
self.assertIn(obj.uuid, present_ids)
@neutron_plugin_db.catch_convert_exception
def err_on_object_2(orig_method, res_obj, *args, **kwargs):
if res_obj.uuid == objs[2].uuid:
raise Exception('faking inconsistent element')
return orig_method(res_obj, *args, **kwargs)
with test_common.patch(
self.neutron_db_obj, res_xlate_name, err_on_object_2):
res_dicts = self.list_resource(res_url_pfx)
present_ids = [r['id'] for r in res_dicts]
self.assertNotIn(objs[2].uuid, present_ids)
res_dicts = self.list_resource(res_url_pfx)
present_ids = [r['id'] for r in res_dicts]
for obj in objs:
self.assertIn(obj.uuid, present_ids)
# end for collections that are objects in contrail model
# subnets, sg-rules etc.
res_dicts = self.list_resource('subnet')
present_ids = [r['id'] for r in res_dicts]
for sn_id in [sn0_id, sn1_id, sn2_id]:
self.assertIn(sn_id, present_ids)
@neutron_plugin_db.catch_convert_exception
def err_on_sn2(orig_method, subnet_vnc, *args, **kwargs):
if subnet_vnc.subnet_uuid == sn2_id:
raise Exception('faking inconsistent element')
return orig_method(subnet_vnc, *args, **kwargs)
with test_common.patch(
self.neutron_db_obj, '_subnet_vnc_to_neutron', err_on_sn2):
res_dicts = self.list_resource('subnet')
present_ids = [r['id'] for r in res_dicts]
self.assertNotIn(sn2_id, present_ids)
for obj_type, obj_list in objects.items():
delete_method = getattr(self._vnc_lib, obj_type+'_delete')
for obj in obj_list:
delete_method(id=obj.uuid)
# end test_list_with_inconsistent_members
def test_extra_fields_on_network(self):
test_obj = self._create_test_object()
context = {'operation': 'READ',
'user_id': '',
'roles': ''}
data = {'fields': None,
'id': test_obj.uuid}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/network', body)
net_dict = json.loads(resp.text)
self.assertIn('fq_name', net_dict)
# end test_extra_fields_on_network
def test_port_bindings(self):
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("somehost")
vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': {'foo': 'bar'},
'binding:host_id': 'somehost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
self.assertTrue(isinstance(port_dict['binding:profile'], dict))
self.assertTrue(isinstance(port_dict['binding:host_id'], basestring))
# Clean the resources
self.delete_resource('port', proj_uuid, port_dict['id'])
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.virtual_router_delete(id=vr_obj)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
# end test_port_bindings
def test_sub_interfaces_with_vm_attached(self):
vn = vnc_api.VirtualNetwork('vn-%s' % (self.id()))
self._vnc_lib.virtual_network_create(vn)
vmi_prop = vnc_api.VirtualMachineInterfacePropertiesType(sub_interface_vlan_tag=256)
vmi_name = 'vmi1'
vm_name = 'vm1'
vm = vnc_api.VirtualMachine(vm_name)
self._vnc_lib.virtual_machine_create(vm)
vmi_obj = vnc_api.VirtualMachineInterface(vmi_name, parent_obj=vnc_api.Project())
vmi_obj.set_virtual_network(vn)
vmi_obj.add_virtual_machine(vm)
vmi_id = self._vnc_lib.virtual_machine_interface_create(vmi_obj)
vmi_name = 'sub_vmi1'
sub_vmi_obj = vnc_api.VirtualMachineInterface(vmi_name, parent_obj=vnc_api.Project(),
virtual_machine_interface_properties=vmi_prop)
sub_vmi_obj.set_virtual_network(vn)
sub_vmi_obj.set_virtual_machine_interface(vmi_obj)
self._vnc_lib.virtual_machine_interface_create(sub_vmi_obj)
sub_intf_dict = self.read_resource('port', sub_vmi_obj.uuid)
self.assertEqual(sub_intf_dict['status'], constants.PORT_STATUS_ACTIVE)
# Clean the resources
self._vnc_lib.virtual_machine_interface_delete(id=vmi_obj.uuid)
self._vnc_lib.virtual_machine_interface_delete(id=sub_vmi_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn.uuid)
self._vnc_lib.virtual_machine_delete(id=vm.uuid)
# end test_sub_interfaces_with_vm_attached
def test_sub_interfaces_with_no_vm_attached(self):
vn = vnc_api.VirtualNetwork('vn-%s' % (self.id()))
self._vnc_lib.virtual_network_create(vn)
vmi_prop = vnc_api.VirtualMachineInterfacePropertiesType(sub_interface_vlan_tag=256)
vmi_name = 'vmi2'
vmi_obj = vnc_api.VirtualMachineInterface(
vmi_name, parent_obj=vnc_api.Project())
vmi_obj.set_virtual_network(vn)
vmi_id = self._vnc_lib.virtual_machine_interface_create(vmi_obj)
vmi_name = 'sub_vmi2'
sub_vmi_obj = vnc_api.VirtualMachineInterface(
vmi_name, parent_obj=vnc_api.Project(),
virtual_machine_interface_properties=vmi_prop)
sub_vmi_obj.set_virtual_network(vn)
sub_vmi_obj.set_virtual_machine_interface(vmi_obj)
self._vnc_lib.virtual_machine_interface_create(sub_vmi_obj)
sub_intf_dict = self.read_resource('port', sub_vmi_obj.uuid)
self.assertEqual(sub_intf_dict['status'], constants.PORT_STATUS_DOWN)
# Clean the resources
self._vnc_lib.virtual_machine_interface_delete(id=vmi_obj.uuid)
self._vnc_lib.virtual_machine_interface_delete(id=sub_vmi_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn.uuid)
# end test_sub_interfaces_with_no_vm_attached
@unittest.skip("Flaky test in CI")
def test_baremetal_logical_interface_bindings(self):
""" This test tests the Logical to Physical interface binding.
A Physical interface is created to represent the actual
physical port on the QFX switch. A Baremetal Server
is launched on a virtual network. As a part of this operation,
a Logical interface is created and is matched with the physical
interface specified by the binding profile in the port.
This test verifies the binding takes place correctly.
"""
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("myhost")
vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
pr_name = self.id() + '_physical_router'
pr = vnc_api.PhysicalRouter(pr_name)
pr_uuid = self._vnc_lib.physical_router_create(pr)
pr_obj = self._vnc_lib.physical_router_read(id=pr_uuid)
esi_id = '00:11:22:33:44:55:66:77:88:99'
pi_name = self.id() + '_physical_interface1'
pi = vnc_api.PhysicalInterface(name=pi_name,
parent_obj=pr_obj,
ethernet_segment_identifier=esi_id)
pi_uuid = self._vnc_lib.physical_interface_create(pi)
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid)
pi_fq_name = pi_obj.get_fq_name()
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
vnic_type = 'baremetal'
binding_profile = {'local_link_information': [
{'port_id': pi_fq_name[2], 'switch_id': pi_fq_name[2],
'switch_info': pi_fq_name[1]}]}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': binding_profile,
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
# Make sure that the binding profile for baremetal is set correctly
match = port_dict['binding:profile'] == binding_profile
self.assertTrue(match)
bound_logical_interface_found = False
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
for l in lis:
li_obj = self._vnc_lib.logical_interface_read(id=l['uuid'])
if li_obj.parent_uuid == pi_uuid:
bound_logical_interface_found = True
break
self.assertTrue(bound_logical_interface_found)
# Now test the negative test case. In this case provide
# bogus binding profile information and make sure no logical
# interface is created and/or bound.
# Delete the previous logical interface and port
self._vnc_lib.logical_interface_delete(id=l['uuid'])
self.delete_resource('port', proj_uuid, port_dict['id'])
vnic_type = 'normal'
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': {'foo': 'bar'},
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
# This time it should not match
match = port_dict['binding:profile'] == binding_profile
self.assertFalse(match)
bound_logical_interface_found = False
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
for l in lis:
li_obj = self._vnc_lib.logical_interface_read(id=l['uuid'])
if li_obj.parent_uuid == pi_uuid:
bound_logical_interface_found = True
break
self.assertFalse(bound_logical_interface_found)
# Clean up the resources
self._vnc_lib.physical_interface_delete(id=pi_uuid)
self._vnc_lib.physical_router_delete(id=pr_uuid)
self.delete_resource('port', proj_uuid, port_dict['id'])
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.virtual_router_delete(id=vr_obj)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
# end test_baremetal_logical_interface_bindings
def test_baremetal_bindings_with_vpg_and_multi_vlan(self):
""" This test tests the VPG->PI and VPG->VMI association.
Multiple physical interfaces are created to represent
members of a vpg group. A Baremetal Server
is launched on a virtual network. As a part of this operation,
a VPG is created and gets bound to VMIs and PIs.
This test verifies the binding and unbinding of these objects
takes place correctly.
"""
mock_zk = self._api_server._db_conn._zk_db
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vn2_obj = vnc_api.VirtualNetwork(self.id() + 'vn2')
vn2_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('2.2.2.0', 24))]))
self._vnc_lib.virtual_network_create(vn2_obj)
vr_obj = vnc_api.VirtualRouter("myhost")
vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
pr_name = self.id() + '_physical_router'
pr = vnc_api.PhysicalRouter(pr_name)
pr_uuid = self._vnc_lib.physical_router_create(pr)
pr_obj = self._vnc_lib.physical_router_read(id=pr_uuid)
fabric_name = self.id() + '_fabric'
fabric = vnc_api.Fabric(fabric_name)
fabric_uuid = self._vnc_lib.fabric_create(fabric)
fabric_obj = self._vnc_lib.fabric_read(id=fabric_uuid)
fabric_fq_name = fabric_obj.get_fq_name()
pr_obj.set_fabric(fabric_obj)
self._vnc_lib.physical_router_update(pr_obj)
num_phy_interfaces = 2
pi_uuid = []
pi_fq_name = []
binding_profile = {'local_link_information':[]}
binding_profile_update = {'local_link_information':[]}
for i in range(num_phy_interfaces+1):
pi_name = self.id() + 'ge-0/0/%s' %i
mac = vnc_api.MacAddressesType(mac_address=
['00:01:00:00:0f:c' + str(i)])
pi = vnc_api.PhysicalInterface(name=pi_name,
parent_obj=pr_obj, physical_interface_mac_addresses = mac)
pi_uuid.append(self._vnc_lib.physical_interface_create(pi))
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid[i])
pi_fq_name.append(pi_obj.get_fq_name())
profile = {'port_id': pi_fq_name[i][2], 'switch_id': pi_fq_name[i][2],
'switch_info': pi_fq_name[i][1], 'fabric': fabric_fq_name[1]}
if i == num_phy_interfaces:
binding_profile_update['local_link_information'].append(profile)
else:
binding_profile['local_link_information'].append(profile)
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
vnic_type = 'baremetal'
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': binding_profile,
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
# Make sure that the binding profile for baremetal is set correctly
match = port_dict['binding:profile'] == binding_profile
self.assertTrue(match)
# CREATE: Make sure LAG is created
vpg_found = False
vpg_dict = self._vnc_lib.virtual_port_groups_list()
vpgs = vpg_dict['virtual-port-groups']
vpg_obj = None
for l in vpgs:
vpg_obj = self._vnc_lib.virtual_port_group_read(id=l['uuid'])
if vpg_obj.parent_uuid == fabric_uuid:
vpg_found = True
break
self.assertTrue(vpg_found)
data2 = {'resource':{'network_id': vn2_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': binding_profile,
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost',
'binding:vpg': vpg_obj.name}}
body2 = {'context': context, 'data': data2}
resp = self._api_svr_app.post_json('/neutron/port', body2)
port_dict2 = json.loads(resp.text)
# Make sure VPG has two VMIs associated
vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)
vmi_refs = vpg_obj.get_virtual_machine_interface_refs()
self.assertEqual(len(vmi_refs), 2)
self.delete_resource('port', proj_uuid, port_dict2['id'])
# Ensure that VPG interface is not deleted
# since it still has reference to other VMI
vpg_dict = self._vnc_lib.virtual_port_groups_list()
vpgs = vpg_dict['virtual-port-groups']
if len(vpgs) == 0:
self.assertFalse(True)
# UPDATE: Make sure VPG has ref to only 3rd physical interface
# after port update
vnic_type = 'baremetal'
data = { 'binding:profile': binding_profile_update,
'binding:vnic_type': vnic_type,
'binding:vpg': vpg_obj.name,
'binding:host_id': 'myhost'}
self.update_resource('port', port_dict['id'], proj_uuid, extra_res_fields=data)
vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)
llc = binding_profile_update['local_link_information']
ref = vpg_obj.get_physical_interface_refs()[0]
self.assertEqual(llc[0]['port_id'], ref['to'][-1])
# DELETE: Make sure VPG obj. gets deleted after port delete
self.delete_resource('port', proj_uuid, port_dict['id'])
# Ensure that VPG interface is deleted
vpg_dict = self._vnc_lib.virtual_port_groups_list()
vpgs = vpg_dict['virtual-port-groups']
if len(vpgs) > 0:
self.assertFalse(True)
# Clean up the resources
for i in range(num_phy_interfaces+1):
self._vnc_lib.physical_interface_delete(id=pi_uuid[i])
self._vnc_lib.physical_router_delete(id=pr_uuid)
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.virtual_router_delete(id=vr_obj)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
# end test_baremetal_bindings_with_vpg_and_multi_vlan
@unittest.skip("Flaky test in CI")
def test_baremetal_logical_interface_bindings_with_multi_homing(self):
""" This test tests the Bond interface that connects to two Tors.
Multiple physical interfaces are created to represent
members of a Lag group. These physical interfaces are connected to
two different TORs to create a multi-homing Lag. A Baremetal Server
is launched on a virtual network. As a part of this operation,
two logical interfaces are created and mapped to the VMI and
connected to respective physical interfaces. Additionally, ESI
(Ethernet Segment Identifier) is computed based upon the MAC of
VMI and added to physical interface properties.
This verifies that these objects are appropriately created.
Additionally, it verifies that they are cleaned up when the
Baremetal server is decommissioned.
"""
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("myhost")
vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
number_of_tors = 2
num_phy_interfaces_per_tor = 1
pi_uuid = []
pi_fq_name = []
pr_uuid = []
pr_obj = []
tors = {}
binding_profile = {'local_link_information':[]}
for t in range(number_of_tors):
pr_name = self.id() + '_physical_router_%s' %t
pr = vnc_api.PhysicalRouter(pr_name)
pr_uuid.append(self._vnc_lib.physical_router_create(pr))
pr_obj.append(self._vnc_lib.physical_router_read(id=pr_uuid[t]))
for i in range(num_phy_interfaces_per_tor):
pi_name = self.id() + 'ge-0/0/%s' %i
pi = vnc_api.PhysicalInterface(name=pi_name,
parent_obj=pr_obj[t])
pi_uuid.append(self._vnc_lib.physical_interface_create(pi))
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid[i])
pi_fq_name.append(pi_obj.get_fq_name())
profile = {'port_id': pi_fq_name[i][2], 'switch_id': pi_fq_name[i][2],
'switch_info': pr_name}
#'switch_info': pi_fq_name[i][1]}
binding_profile['local_link_information'].append(profile)
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
vnic_type = 'baremetal'
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': binding_profile,
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
# Make sure that the binding profile for baremetal is set correctly
match = port_dict['binding:profile'] == binding_profile
self.assertTrue(match)
for t in range(number_of_tors):
if num_phy_interfaces_per_tor > 1:
# Make sure LAG is created
lag_found = False
lag_dict = self._vnc_lib.link_aggregation_groups_list()
lags = lag_dict['link-aggregation-groups']
for l in lags:
lag_obj = self._vnc_lib.link_aggregation_group_read(id=l['uuid'])
if lag_obj.parent_uuid == pr_uuid:
lag_found = True
break
self.assertTrue(lag_found)
# Make sure physical interface starting with "ae" got created
phy_interface_found = False
pi_dict = self._vnc_lib.physical_interfaces_list()
lis = pi_dict['physical-interfaces']
for l in lis:
pi_obj = self._vnc_lib.physical_interface_read(id=l['uuid'])
if pi_obj.name.startswith('ae'):
phy_interface_found = True
new_pi_uuid = pi_obj.uuid
break
self.assertTrue(phy_interface_found)
bound_logical_interface_found = False
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
for l in lis:
li_obj = self._vnc_lib.logical_interface_read(id=l['uuid'])
if num_phy_interfaces_per_tor > 1:
expected_parent_uuid = new_pi_uuid
else:
expected_parent_uuid = pi_uuid[t]
if li_obj.parent_uuid == expected_parent_uuid:
bound_logical_interface_found = True
break
self.assertTrue(bound_logical_interface_found)
# Verify that the ESI is set correctly in the physical interfaces
esi = []
for i in range(len(pi_uuid)):
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid[i])
if pi_obj.ethernet_segment_identifier:
esi.append(pi_obj.ethernet_segment_identifier)
if len(esi) < number_of_tors:
self.assertTrue(False)
esi_match = True
esi_val = esi[0]
for i in range(len(esi)):
if esi_val != esi[i]:
esi_match = False
self.assertTrue(esi_match)
# Now verify the delete function to ensure that the resources
# created to facilitate the LAG interface are deleted with the
# deletion of the port.
self.delete_resource('port', proj_uuid, port_dict['id'])
if num_phy_interfaces_per_tor > 1:
# Ensure that LAG interface is deleted
lag_dict = self._vnc_lib.link_aggregation_groups_list()
lags = lag_dict['link-aggregation-groups']
if len(lags) > 0:
self.assertFalse(True)
# Make sure physical interface starting with "ae" got deleted
phy_interface_found = False
pi_dict = self._vnc_lib.physical_interfaces_list()
lis = pi_dict['physical-interfaces']
for l in lis:
pi_obj = self._vnc_lib.physical_interface_read(id=l['uuid'])
if pi_obj.name.startswith('ae'):
phy_interface_found = True
break
self.assertFalse(phy_interface_found)
# Ensure that the Logical Interface got deleted as well
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
if len(lis) > 0:
self.assertFalse(True)
# Verify that the ESI is cleared from the physical interfaces
esi = []
for i in range(len(pi_uuid)):
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid[i])
if pi_obj.ethernet_segment_identifier:
esi.append(pi_obj.ethernet_segment_identifier)
if len(esi) > 0:
self.assertTrue(False)
# Clean up the resources
for i in range(len(pi_uuid)):
self._vnc_lib.physical_interface_delete(id=pi_uuid[i])
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.virtual_router_delete(id=vr_obj)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
for t in range(len(pr_uuid)):
self._vnc_lib.physical_router_delete(id=pr_uuid[t])
# end test_baremetal_logical_interface_bindings_with_multi_homing
@unittest.skip("Flaky test in CI")
def test_baremetal_logical_interface_bindings_multiple_bonds(self):
def _test_multiple_bonds(tors=None, bonds=None):
""" This test tests the Bond interface that connects to two Tors.
Multiple physical interfaces are created to represent
members of a Lag group. These physical interfaces are connected to
two different TORs to create a multi-homing Lag. A Baremetal Server
is launched on a virtual network. As a part of this operation,
two logical interfaces are created and mapped to the VMI and
connected to respective physical interfaces. Additionally, ESI
(Ethernet Segment Identifier) is computed based upon the MAC of
VMI and added to physical interface properties.
This verifies that these objects are appropriately created.
Additionally, it verifies that they are cleaned up when the
Baremetal server is decommissioned.
"""
mock_zk = self._api_server._db_conn._zk_db
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("myhost")
vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
binding_profile = {'local_link_information':[]}
# Create required resources
for tor_name, tor_info in tors.iteritems():
pr_name = tor_name
pr = vnc_api.PhysicalRouter(pr_name)
tor_info['pr_uuid'] = self._vnc_lib.physical_router_create(pr)
tor_info['pr_obj'] = self._vnc_lib.physical_router_read(id=tor_info['pr_uuid'])
for i in range(len(tor_info['interfaces'])):
pi_name = tor_info['interfaces'][i]
pi = vnc_api.PhysicalInterface(name=pi_name,
parent_obj=tor_info['pr_obj'])
tor_info['pi_uuid'][pi_name] = self._vnc_lib.physical_interface_create(pi)
for bond, bond_info in bonds.iteritems():
binding_profile['local_link_information'] = []
for t in range(len(bond_info['tors'])):
for i in range(len(bond_info['tors'][t]['interfaces'])):
switch = bond_info['tors'][t]['name']
port = bond_info['tors'][t]['interfaces'][i]
profile = {'port_id': port,
'switch_id': 'anyThing',
'switch_info': switch}
binding_profile['local_link_information'].append(profile)
bond_info['binding_profile'] = copy.deepcopy(binding_profile)
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
zk_index = 0
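# For each bond, emulate a Neutron baremetal port create by POSTing to
# /neutron/port with vnic_type 'baremetal' and the bond's binding profile;
# this is what triggers the LAG/"ae" interface and logical interface
# creation on the TORs that is verified below.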
for bond, bond_info in bonds.iteritems():
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
vnic_type = 'baremetal'
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:profile': bond_info['binding_profile'],
'binding:vnic_type': vnic_type,
'binding:host_id': 'myhost'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
bond_info['port_dict'] = port_dict
# Make sure that the binding profile for baremetal is set correctly
match = port_dict['binding:profile'] == bond_info['binding_profile']
self.assertTrue(match)
for t in range(len(bond_info['tors'])):
switch_name = bond_info['tors'][t]['name']
switch_interfaces = bond_info['tors'][t]['interfaces']
if len(switch_interfaces) > 1:
# Make sure LAG is created
lag_found = False
lag_dict = self._vnc_lib.link_aggregation_groups_list()
lags = lag_dict['link-aggregation-groups']
for l in lags:
lag_obj = self._vnc_lib.link_aggregation_group_read(id=l['uuid'])
if lag_obj.parent_uuid == tors[bond_info['tors'][t]['name']]['pr_uuid']:
lag_found = True
zk_element_fq_name = ['default-global-system-config', switch_name, switch_interfaces[0]]
zk_element_fq_name_str = ':'.join(zk_element_fq_name)
self.assertEqual(mock_zk.get_ae_from_id(zk_index),
zk_element_fq_name_str)
zk_index += 1
break
self.assertTrue(lag_found)
# Make sure physical interface starting with "ae" got created
phy_interface_found = False
pi_dict = self._vnc_lib.physical_interfaces_list()
lis = pi_dict['physical-interfaces']
for l in lis:
pi_obj = self._vnc_lib.physical_interface_read(id=l['uuid'])
if pi_obj.name.startswith('ae'):
phy_interface_found = True
new_pi_uuid = pi_obj.uuid
break
self.assertTrue(phy_interface_found)
bound_logical_interface_found = False
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
for l in lis:
li_obj = self._vnc_lib.logical_interface_read(id=l['uuid'])
if len(bond_info['tors'][t]['interfaces']) > 1:
expected_parent_uuid = new_pi_uuid
else:
expected_parent_uuid = (
tors[bond_info['tors'][t]['name']]['pi_uuid'][switch_interfaces[0]])
if li_obj.parent_uuid == expected_parent_uuid:
bound_logical_interface_found = True
break
self.assertTrue(bound_logical_interface_found)
# Verify that the ESI is set correctly in the physical interfaces
if len(tors.keys()) > 1 and len(switch_interfaces) > 1:
switch_intf = "ae" + switch_interfaces[0][2:]
else:
switch_intf = switch_interfaces[0]
# ESI now is only set for MH case (not LAG case)
fq_name = ['default-global-system-config', switch_name, switch_intf]
pi_uuid = self._vnc_lib.fq_name_to_id('physical-interface', fq_name)
pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuid)
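# The ESI, when present, is derived from the VMI MAC (see the docstring),
# so it is expected to start with the 00:00:00:00 prefix checked below.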
if pi_obj.ethernet_segment_identifier:
esi = pi_obj.ethernet_segment_identifier
if not esi.startswith('00:00:00:00'):
self.assertTrue(False)
# Now verify the delete function to ensure that the resources
# created to facilitate the LAG interface are deleted along with
# the deletion of the port and/or bond.
for bond, bond_info in bonds.iteritems():
port_dict = bond_info['port_dict']
self.delete_resource('port', proj_uuid, port_dict['id'])
# Ensure that LAG interface is deleted
lag_dict = self._vnc_lib.link_aggregation_groups_list()
lags = lag_dict['link-aggregation-groups']
if len(lags) > 0:
self.assertFalse(True)
# Make sure physical interface starting with "ae" got deleted
phy_interface_found = False
pi_dict = self._vnc_lib.physical_interfaces_list()
lis = pi_dict['physical-interfaces']
for l in lis:
pi_obj = self._vnc_lib.physical_interface_read(id=l['uuid'])
if pi_obj.name.startswith('ae'):
phy_interface_found = True
break
self.assertFalse(phy_interface_found)
# Verify that the ESI is cleared from the physical interfaces
esi = []
pi_dict = self._vnc_lib.physical_interfaces_list()
lis = pi_dict['physical-interfaces']
for l in lis:
pi_obj = self._vnc_lib.physical_interface_read(id=l['uuid'])
if pi_obj.ethernet_segment_identifier:
esi.append(pi_obj.ethernet_segment_identifier)
if len(esi) > 0:
self.assertTrue(False)
# Ensure that the Logical Interface got deleted as well
li_dict = self._vnc_lib.logical_interfaces_list()
lis = li_dict['logical-interfaces']
if len(lis) > 0:
self.assertFalse(True)
# Clean up the resources
for tor_name, tor_info in tors.iteritems():
interfaces = tor_info['pi_uuid']
for i in interfaces.values():
self._vnc_lib.physical_interface_delete(id=i)
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.virtual_router_delete(id=vr_obj)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
for tor_name, tor_info in tors.iteritems():
self._vnc_lib.physical_router_delete(id=tor_info['pr_uuid'])
# Build various test topologies with different sets of bond configurations
tors = {
'tor-1': {'interfaces': ['int11', 'int12', 'int13', 'int14'], 'pi_uuid': {}, 'pr_uuid': None, 'pr_obj': None},
'tor-2': {'interfaces': ['int21', 'int22', 'int23', 'int24'], 'pi_uuid': {}, 'pr_uuid': None, 'pr_obj': None},
}
b1 = {
'bond-1': {'tors': [{'name': 'tor-1', 'interfaces': ['int11', 'int12']}]},
'bond-2': {'tors': [{'name': 'tor-1', 'interfaces': ['int13']}, {'name': 'tor-2', 'interfaces': ['int23']}]},
}
b2 = {
'bond-1': {'tors': [{'name': 'tor-1', 'interfaces': ['int11', 'int12']}]},
'bond-2': {'tors': [{'name': 'tor-1', 'interfaces': ['int13', 'int14']}]},
'bond-3': {'tors': [{'name': 'tor-2', 'interfaces': ['int21', 'int22']}]},
'bond-4': {'tors': [{'name': 'tor-2', 'interfaces': ['int23', 'int24']}]},
}
b3 = {
'bond-1': {'tors': [{'name': 'tor-1', 'interfaces': ['int11']}, {'name': 'tor-2', 'interfaces': ['int21']}]},
'bond-2': {'tors': [{'name': 'tor-1', 'interfaces': ['int12']}, {'name': 'tor-2', 'interfaces': ['int22']}]},
'bond-3': {'tors': [{'name': 'tor-1', 'interfaces': ['int13']}, {'name': 'tor-2', 'interfaces': ['int23']}]},
'bond-4': {'tors': [{'name': 'tor-1', 'interfaces': ['int14']}, {'name': 'tor-2', 'interfaces': ['int24']}]},
}
b4 = {
'bond-1': {'tors': [{'name': 'tor-1', 'interfaces': ['int11', 'int12']},
{'name': 'tor-2', 'interfaces': ['int21', 'int22']}]},
'bond-2': {'tors': [{'name': 'tor-1', 'interfaces': ['int13', 'int14']},
{'name': 'tor-2', 'interfaces': ['int23', 'int24']}]},
}
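# b1: a single-TOR LAG plus a bond multi-homed across both TORs;
# b2: four single-TOR LAGs (two per TOR);
# b3: bonds multi-homed across both TORs with one member link per TOR;
# b4: bonds multi-homed across both TORs with a two-link LAG on each TOR.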
test_scenarios = [b1, b2, b3, b4]
# Execute test for each bonding topology
for bonds in test_scenarios:
_test_multiple_bonds(tors=tors, bonds=bonds)
# end test_baremetal_logical_interface_bindings_multiple_bonds
def test_sg_rules_delete_when_peer_group_deleted_on_read_sg(self):
sg1_obj = vnc_api.SecurityGroup('sg1-%s' %(self.id()))
self._vnc_lib.security_group_create(sg1_obj)
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sg2_obj = vnc_api.SecurityGroup('sg2-%s' %(self.id()))
self._vnc_lib.security_group_create(sg2_obj)
sg2_obj = self._vnc_lib.security_group_read(sg2_obj.fq_name)
sgr_uuid = str(uuid.uuid4())
local = [vnc_api.AddressType(security_group='local')]
remote = [vnc_api.AddressType(security_group=sg2_obj.get_fq_name_str())]
sgr_obj = vnc_api.PolicyRuleType(rule_uuid=sgr_uuid,
direction='>',
protocol='any',
src_addresses=remote,
src_ports=[vnc_api.PortType(0, 255)],
dst_addresses=local,
dst_ports=[vnc_api.PortType(0, 255)],
ethertype='IPv4')
rules = vnc_api.PolicyEntriesType([sgr_obj])
sg1_obj.set_security_group_entries(rules)
self._vnc_lib.security_group_update(sg1_obj)
self._vnc_lib.security_group_delete(fq_name=sg2_obj.fq_name)
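# Reading sg1 through the Neutron API should filter out the rule that
# references the now-deleted sg2, while the same rule is still present
# when sg1 is read directly through the VNC API.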
sg_dict = self.read_resource('security_group', sg1_obj.uuid)
sgr = [rule['id'] for rule in sg_dict.get('security_group_rules', [])]
self.assertNotIn(sgr_uuid, sgr)
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sgr = [rule.rule_uuid for rule in
sg1_obj.get_security_group_entries().get_policy_rule() or []]
self.assertIn(sgr_uuid, sgr)
def test_sg_rules_delete_when_peer_group_deleted_on_read_rule(self):
sg1_obj = vnc_api.SecurityGroup('sg1-%s' %(self.id()))
self._vnc_lib.security_group_create(sg1_obj)
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sg2_obj = vnc_api.SecurityGroup('sg2-%s' %(self.id()))
self._vnc_lib.security_group_create(sg2_obj)
sg2_obj = self._vnc_lib.security_group_read(sg2_obj.fq_name)
sgr_uuid = str(uuid.uuid4())
local = [vnc_api.AddressType(security_group='local')]
remote = [vnc_api.AddressType(
security_group=sg2_obj.get_fq_name_str())]
sgr_obj = vnc_api.PolicyRuleType(rule_uuid=sgr_uuid,
direction='>',
protocol='any',
src_addresses=remote,
src_ports=[vnc_api.PortType(0, 255)],
dst_addresses=local,
dst_ports=[vnc_api.PortType(0, 255)],
ethertype='IPv4')
rules = vnc_api.PolicyEntriesType([sgr_obj])
sg1_obj.set_security_group_entries(rules)
self._vnc_lib.security_group_update(sg1_obj)
self._vnc_lib.security_group_delete(fq_name=sg2_obj.fq_name)
with ExpectedException(webtest.app.AppError):
self.read_resource('security_group_rule', sgr_uuid)
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sgr = [rule.rule_uuid for rule in
sg1_obj.get_security_group_entries().get_policy_rule() or []]
self.assertIn(sgr_uuid, sgr)
def test_sg_rules_delete_when_peer_group_deleted_on_list_rules(self):
sg1_obj = vnc_api.SecurityGroup('sg1-%s' %(self.id()))
self._vnc_lib.security_group_create(sg1_obj)
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sg2_obj = vnc_api.SecurityGroup('sg2-%s' %(self.id()))
self._vnc_lib.security_group_create(sg2_obj)
sg2_obj = self._vnc_lib.security_group_read(sg2_obj.fq_name)
sgr_uuid = str(uuid.uuid4())
local = [vnc_api.AddressType(security_group='local')]
remote = [vnc_api.AddressType(
security_group=sg2_obj.get_fq_name_str())]
sgr_obj = vnc_api.PolicyRuleType(rule_uuid=sgr_uuid,
direction='>',
protocol='any',
src_addresses=remote,
src_ports=[vnc_api.PortType(0, 255)],
dst_addresses=local,
dst_ports=[vnc_api.PortType(0, 255)],
ethertype='IPv4')
rules = vnc_api.PolicyEntriesType([sgr_obj])
sg1_obj.set_security_group_entries(rules)
self._vnc_lib.security_group_update(sg1_obj)
self._vnc_lib.security_group_delete(fq_name=sg2_obj.fq_name)
sgr_dict = self.list_resource('security_group_rule')
self.assertNotIn(sgr_uuid, [rule['id'] for rule in sgr_dict])
sg1_obj = self._vnc_lib.security_group_read(sg1_obj.fq_name)
sgr = [rule.rule_uuid for rule in
sg1_obj.get_security_group_entries().get_policy_rule() or []]
self.assertIn(sgr_uuid, sgr)
def test_sg_list_with_remote(self):
proj_obj = self._vnc_lib.project_read(
fq_name=['default-domain', 'default-project'])
sg1_dict = self.create_resource('security_group',
proj_obj.uuid,
extra_res_fields={
'name': 'sg1-%s' % self.id(),
})
sg2_dict = self.create_resource('security_group',
proj_obj.uuid,
extra_res_fields={
'name': 'sg2-%s' % self.id(),
})
sgr1_dict = self.create_resource('security_group_rule',
proj_obj.uuid,
extra_res_fields={
'name': 'sgr1-%s' % self.id(),
'security_group_id': sg1_dict['id'],
'remote_ip_prefix': None,
'remote_group_id': sg2_dict['id'],
'port_range_min': None,
'port_range_max': None,
'protocol': None,
'ethertype': None,
'direction': 'egress',
}
)
sgr2_dict = self.create_resource('security_group_rule',
proj_obj.uuid,
extra_res_fields={
'name': 'sgr2-%s' % self.id(),
'security_group_id': sg2_dict['id'],
'remote_ip_prefix': None,
'remote_group_id': sg1_dict['id'],
'port_range_min': None,
'port_range_max': None,
'protocol': None,
'ethertype': None,
'direction': 'ingress',
}
)
sg_list = self.list_resource('security_group', proj_obj.uuid)
found = 0
for sg in sg_list:
if sg['id'] == sg1_dict['id']:
for rule in sg['security_group_rules']:
if rule['direction'] == 'ingress':
self.assertEqual(rule['remote_group_id'], sg2_dict['id'])
found += 1
if sg['id'] == sg2_dict['id']:
for rule in sg['security_group_rules']:
if rule['direction'] == 'ingress':
self.assertEqual(rule['remote_group_id'], sg1_dict['id'])
found += 1
self.assertEqual(found, 2)
def test_delete_irt_for_subnet_host_route(self):
proj_obj = self._vnc_lib.project_read(
fq_name=['default-domain', 'default-project'])
ipam_obj = vnc_api.NetworkIpam('ipam-%s' % self.id())
self._vnc_lib.network_ipam_create(ipam_obj)
vn_obj = vnc_api.VirtualNetwork('vn-%s' % self.id())
sn_uuid = str(uuid.uuid4())
vn_obj.add_network_ipam(
ipam_obj,
vnc_api.VnSubnetsType([
vnc_api.IpamSubnetType(
vnc_api.SubnetType('1.1.1.0', 28),
subnet_uuid=sn_uuid,
host_routes=vnc_api.RouteTableType([
vnc_api.RouteType(
prefix='2.2.2.0/28',
next_hop='1.1.1.3'
)
])
)
])
)
self._vnc_lib.virtual_network_create(vn_obj)
# Create the default sg as vnc_openstack hooks are disabled in this unit test
sg_obj = vnc_api.SecurityGroup('default')
self._vnc_lib.security_group_create(sg_obj)
port_dict = self.create_resource('port',
proj_obj.uuid,
extra_res_fields={
'name': 'vmi-%s' % self.id(),
'network_id': vn_obj.uuid,
'fixed_ips': [{
'ip_address': '1.1.1.3'
}]
})
route_table = vnc_api.RouteTableType('irt-%s' % self.id())
route_table.set_route([])
irt_obj = vnc_api.InterfaceRouteTable(
interface_route_table_routes=route_table,
name='irt-%s' % self.id())
self._vnc_lib.interface_route_table_create(irt_obj)
vmi_obj = self._vnc_lib.virtual_machine_interface_read(
id=port_dict['id'])
vmi_obj.add_interface_route_table(irt_obj)
self._vnc_lib.virtual_machine_interface_update(vmi_obj)
self.delete_resource('port', vmi_obj.parent_uuid, vmi_obj.uuid)
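# Deleting the port should remove only the interface route table that was
# auto-created for the subnet host route (named with
# _IFACE_ROUTE_TABLE_NAME_PREFIX, the subnet uuid and the vmi uuid); the
# manually attached irt_obj must survive.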
host_route_irt_fq_name = proj_obj.fq_name + ['%s_%s_%s' % (
_IFACE_ROUTE_TABLE_NAME_PREFIX, sn_uuid, vmi_obj.uuid)]
with ExpectedException(NoIdError):
self._vnc_lib.interface_route_table_read(host_route_irt_fq_name)
try:
irt_obj = self._vnc_lib.interface_route_table_read(id=irt_obj.uuid)
except NoIdError:
self.fail("The manually added interface route table as been "
"automatically removed")
self.assertIsNone(irt_obj.get_virtual_machine_interface_back_refs())
def _create_port_with_sg(self, proj_id, port_security):
net_q = self.create_resource('network', proj_id)
subnet_q = self.create_resource('subnet', proj_id, extra_res_fields={'network_id': net_q['id'], 'cidr': '10.2.0.0/24', 'ip_version': 4})
sg_q = self.create_resource('security_group', proj_id)
return self.create_resource('port', proj_id, extra_res_fields={'network_id': net_q['id'], 'security_groups': [sg_q['id']], 'port_security_enabled':port_security})
def _create_port_with_no_sg(self, proj_id):
net_q = self.create_resource('network', proj_id)
subnet_q = self.create_resource('subnet', proj_id, extra_res_fields={'network_id': net_q['id'], 'cidr': '10.2.0.0/24', 'ip_version': 4})
return self.create_resource('port', proj_id, extra_res_fields={'network_id': net_q['id'], 'port_security_enabled':False})
def test_create_port_with_port_security_disabled_and_sg(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
with ExpectedException(webtest.app.AppError):
self._create_port_with_sg(proj_obj.uuid, False)
def test_empty_list_for_allowed_address(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
port_q = self._create_port_with_no_sg(proj_obj.uuid)
self.assertTrue(port_q['allowed_address_pairs'] is not None)
def test_update_port_with_port_security_disabled_and_sg(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
port_q = self._create_port_with_sg(proj_obj.uuid, True)
with ExpectedException(webtest.app.AppError):
self.update_resource('port', port_q['id'], proj_obj.uuid, extra_res_fields={'port_security_enabled':False})
def test_update_port_with_security_group_and_port_security_disabled(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
port_q = self._create_port_with_no_sg(proj_obj.uuid)
sg_q = self.create_resource('security_group', proj_obj.uuid)
with ExpectedException(webtest.app.AppError):
self.update_resource('port', port_q['id'], proj_obj.uuid, extra_res_fields={'security_groups': [sg_q['id']]})
def test_fixed_ip_conflicts_with_floating_ip(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
sg_q = self.create_resource('security_group', proj_obj.uuid)
net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True,
'port_security_enabled': True})
subnet_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={
'network_id': net_q['id'],
'cidr': '1.1.1.0/24',
'ip_version': 4,
})
fip_q = self.create_resource('floatingip', proj_obj.uuid,
extra_res_fields={'floating_network_id': net_q['id']})
try:
self.create_resource('port', proj_obj.uuid,
extra_res_fields={
'network_id': net_q['id'],
'fixed_ips': [{'ip_address': fip_q['floating_ip_address']}],
'security_groups': [sg_q['id']],
})
self.assertTrue(False,
'Create with fixed-ip conflicting with floating-ip passed')
except webtest.app.AppError as e:
self.assertIsNot(re.search('Conflict', str(e)), None)
self.assertIsNot(re.search('IP address already in use', str(e)),
None)
# cleanup
self.delete_resource('floatingip', proj_obj.uuid, fip_q['id'])
self.delete_resource('subnet', proj_obj.uuid, subnet_q['id'])
self.delete_resource('network', proj_obj.uuid, net_q['id'])
self.delete_resource('security_group', proj_obj.uuid, sg_q['id'])
# end test_fixed_ip_conflicts_with_floating_ip
def test_empty_floating_ip_body_disassociates(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', 'default-project'])
sg_q = self.create_resource('security_group', proj_obj.uuid)
pvt_net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True,
'port_security_enabled': True})
pvt_subnet_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={
'network_id': pvt_net_q['id'],
'cidr': '1.1.1.0/24',
'ip_version': 4,
})
port_q = self.create_resource('port', proj_obj.uuid,
extra_res_fields={
'network_id': pvt_net_q['id'],
'security_groups': [sg_q['id']],
})
pub_net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True})
pub_subnet_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={
'network_id': pub_net_q['id'],
'cidr': '10.1.1.0/24',
'ip_version': 4,
})
fip_q = self.create_resource('floatingip', proj_obj.uuid,
extra_res_fields={
'floating_network_id': pub_net_q['id'],
'port_id': port_q['id'],
})
# update fip with no 'resource' key and assert port disassociated
context = {'operation': 'UPDATE',
'user_id': '',
'is_admin': False,
'roles': '',
'tenant_id': proj_obj.uuid}
data = {'id': fip_q['id']}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/floatingip', body)
self.assertEqual(resp.status_code, 200)
fip_read = self.read_resource('floatingip', fip_q['id'])
self.assertEqual(fip_read['port_id'], None)
# cleanup
self.delete_resource('port', proj_obj.uuid, port_q['id'])
self.delete_resource('subnet', proj_obj.uuid, pvt_subnet_q['id'])
self.delete_resource('network', proj_obj.uuid, pvt_net_q['id'])
self.delete_resource('floatingip', proj_obj.uuid, fip_q['id'])
self.delete_resource('subnet', proj_obj.uuid, pub_subnet_q['id'])
self.delete_resource('network', proj_obj.uuid, pub_net_q['id'])
self.delete_resource('security_group', proj_obj.uuid, sg_q['id'])
# end test_empty_floating_ip_body_disassociates
def test_fq_name_project(self):
proj_id = str(uuid.uuid4())
proj_name = 'proj-test'
test_case.get_keystone_client().tenants.add_tenant(proj_id, proj_name)
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain', proj_name])
# end test_fq_name_project
def test_floating_ip_list(self):
proj_objs = []
for i in range(3):
proj_id = str(uuid.uuid4())
proj_name = 'proj-%s-%s' %(self.id(), i)
test_case.get_keystone_client().tenants.add_tenant(proj_id, proj_name)
proj_objs.append(self._vnc_lib.project_read(id=proj_id))
sg_q_list = [self.create_resource('security_group', proj_objs[i].uuid)
for i in range(3)]
# public network on last project
pub_net1_q = self.create_resource(
'network',
proj_objs[-1].uuid,
extra_res_fields={
'name': 'public-network-%s-1' % self.id(),
'router:external': True,
},
)
self.create_resource('subnet', proj_objs[-1].uuid,
extra_res_fields={
'name': 'public-subnet-%s-1' % self.id(),
'network_id': pub_net1_q['id'],
'cidr': '10.1.1.0/24',
'ip_version': 4,
})
pub_net2_q = self.create_resource(
'network',
proj_objs[-1].uuid,
extra_res_fields={
'name': 'public-network-%s-2' % self.id(),
'router:external': True,
},
)
self.create_resource('subnet', proj_objs[-1].uuid,
extra_res_fields={
'name': 'public-subnet-%s-2' % self.id(),
'network_id': pub_net2_q['id'],
'cidr': '20.1.1.0/24',
'ip_version': 4,
})
def create_net_subnet_port_assoc_fip(i, pub_net_q_list,
has_routers=True):
net_q_list = [
self.create_resource(
'network',
proj_objs[i].uuid,
extra_res_fields={
'name': 'network-%s-%s-%s' % (self.id(), i, j),
'port_security_enabled': True,
},
) for j in range(2)]
subnet_q_list = [
self.create_resource(
'subnet',
proj_objs[i].uuid,
extra_res_fields={
'name': 'subnet-%s-%s-%s' % (self.id(), i, j),
'network_id': net_q_list[j]['id'],
'cidr': '1.%s.%s.0/24' %(i, j),
'ip_version': 4,
},
) for j in range(2)]
if has_routers:
router_q_list = [self.create_resource(
'router',
proj_objs[i].uuid,
extra_res_fields={
'name': 'router-%s-%s-%s' % (self.id(), i, j),
'external_gateway_info': {
'network_id': pub_net_q_list[j]['id'],
}
}) for j in range(2)]
[self.add_router_interface(
router_q_list[j]['id'],
proj_objs[i].uuid,
is_admin=True,
extra_res_fields={'subnet_id': subnet_q_list[j]['id']},
) for j in range(2)]
else:
router_q_list = None
port_q_list = [self.create_resource(
'port',
proj_objs[i].uuid,
extra_res_fields={
'name': 'port-%s-%s-%s' % (self.id(), i, j),
'network_id': net_q_list[j]['id'],
'security_groups': [sg_q_list[i]['id']],
},
) for j in range(2)]
fip_q_list = [self.create_resource(
'floatingip',
proj_objs[i].uuid,
is_admin=True,
extra_res_fields={
'name': 'fip-%s-%s-%s' %(self.id(), i, j),
'floating_network_id': pub_net_q_list[j]['id'],
'port_id': port_q_list[j]['id'],
},
) for j in range(2)]
return {'network': net_q_list, 'subnet': subnet_q_list,
'ports': port_q_list, 'fips': fip_q_list,
'routers': router_q_list}
# end create_net_subnet_port_assoc_fip
created = []
# without routers
created.append(create_net_subnet_port_assoc_fip(
0, [pub_net1_q, pub_net2_q], has_routers=False))
# with routers
created.append(create_net_subnet_port_assoc_fip(
1, [pub_net1_q, pub_net2_q], has_routers=True))
# 1. list as admin for all routers
fip_dicts = self.list_resource('floatingip', is_admin=True)
# convert list to dict by id
fip_dicts = dict((fip['id'], fip) for fip in fip_dicts)
# assert all floating IPs we created are received back
for fip in created[0]['fips'] + created[1]['fips']:
self.assertIn(fip['id'], fip_dicts.keys())
# assert router-id present in fips of proj[1]
self.assertEqual(created[1]['routers'][0]['id'],
fip_dicts[created[1]['fips'][0]['id']]['router_id'])
self.assertEqual(created[1]['routers'][1]['id'],
fip_dicts[created[1]['fips'][1]['id']]['router_id'])
# assert router-id not present in fips of proj[0]
self.assertEqual(None,
fip_dicts[created[0]['fips'][0]['id']]['router_id'])
self.assertEqual(None,
fip_dicts[created[0]['fips'][1]['id']]['router_id'])
# 2. list routers within project
fip_dicts = self.list_resource(
'floatingip', proj_uuid=proj_objs[0].uuid)
self.assertEqual(None,
fip_dicts[0]['router_id'])
self.assertEqual(None,
fip_dicts[1]['router_id'])
# convert list to dict by port-id
fip_dicts = dict((fip['port_id'], fip) for fip in fip_dicts)
# assert fips point to right port
self.assertEqual(created[0]['ports'][0]['fixed_ips'][0]['ip_address'],
fip_dicts[created[0]['ports'][0]['id']]['fixed_ip_address'])
self.assertEqual(created[0]['ports'][1]['fixed_ips'][0]['ip_address'],
fip_dicts[created[0]['ports'][1]['id']]['fixed_ip_address'])
# end test_floating_ip_list
def test_network_delete_when_fip_associated_w_port(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
vn_obj = vnc_api.VirtualNetwork(self.id(), proj_obj)
vn_obj.set_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(vnc_api.SubnetType('20.1.1.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vmi_obj = vnc_api.VirtualMachineInterface(
'vmi-%s' %(self.id()), proj_obj)
vmi_obj.set_virtual_network(vn_obj)
self._vnc_lib.virtual_machine_interface_create(vmi_obj)
fip_pool_obj = vnc_api.FloatingIpPool(self.id(), vn_obj)
self._vnc_lib.floating_ip_pool_create(fip_pool_obj)
fip_obj = vnc_api.FloatingIp('fip-%s' %(self.id()), fip_pool_obj)
fip_obj.set_project(proj_obj)
fip_obj.add_virtual_machine_interface(vmi_obj)
self._vnc_lib.floating_ip_create(fip_obj)
# deleting network when it has associated fip
# should give an error.
with ExpectedException(webtest.app.AppError):
self.delete_resource('network', proj_obj.uuid, vn_obj.uuid)
# cleanup
self._vnc_lib.floating_ip_delete(id=fip_obj.uuid)
self._vnc_lib.floating_ip_pool_delete(id=fip_pool_obj.uuid)
self._vnc_lib.virtual_machine_interface_delete(id=vmi_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
self._vnc_lib.project_delete(id=proj_obj.uuid)
# end test_network_delete_when_fip_associated_w_port
def test_create_fip_w_port_associated_w_another_fip_negative(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
proj_id = proj_obj.uuid
# external network
net_q = self.create_resource('network', proj_id,
extra_res_fields={'router:external':True})
subnet_q = self.create_resource('subnet', proj_id,
extra_res_fields=
{'network_id': net_q['id'],
'cidr': '10.2.0.0/24',
'ip_version': 4})
# private network
pvt_net_q = self.create_resource(
'network', proj_id,
extra_res_fields={'port_security_enabled': True},
)
pvt_subnet_q = self.create_resource('subnet', proj_id,
extra_res_fields=
{'network_id': pvt_net_q['id'],
'cidr': '20.1.0.0/24',
'ip_version': 4})
sg_q = self.create_resource('security_group', proj_id)
port_q = self.create_resource('port', proj_id,
extra_res_fields=
{'network_id':
pvt_subnet_q['network_id'],
'security_groups': [sg_q['id']]})
fip_q = self.create_resource('floatingip', proj_id,
extra_res_fields=
{'floating_network_id': net_q['id'],
'port_id': port_q['id']})
# updating a fip that already has an associated port should be successful
self.update_resource('floatingip', fip_q['id'], proj_id,
extra_res_fields={'display_name':'test-fip',
'port_id': port_q['id']})
# creating a fip with a port that already has another fip associated
# should fail.
with ExpectedException(webtest.app.AppError):
fip_q_2 = self.create_resource('floatingip', proj_id,
extra_res_fields={'floating_network_id': net_q['id'],
'port_id': port_q['id']})
# cleanup
self.delete_resource('floatingip', proj_id, fip_q['id'])
self.delete_resource('port', proj_id, port_q['id'])
self.delete_resource('subnet', proj_id, subnet_q['id'])
self.delete_resource('subnet', proj_id, pvt_subnet_q['id'])
self.delete_resource('security_group', proj_id, sg_q['id'])
self.delete_resource('network', proj_id, net_q['id'])
self.delete_resource('network', proj_id, pvt_net_q['id'])
# end test_create_fip_w_port_associated_w_another_fip_negative
def test_network_timestamps(self):
vn_obj = vnc_api.VirtualNetwork(self.id())
self._vnc_lib.virtual_network_create(vn_obj)
vn_dict = self.read_resource('network', vn_obj.uuid)
# verify created timestamp and updated timestamp are same
self.assertEqual(vn_dict['created_at'], vn_dict['updated_at'])
vn_obj.display_name = 'test-vn-timestamps'
self._vnc_lib.virtual_network_update(vn_obj)
vn_dict_2 = self.read_resource('network', vn_obj.uuid)
# verify created timestamp and updated timestamp are not same
self.assertIsNot(vn_dict_2['created_at'], vn_dict_2['updated_at'])
# end test_network_timestamps
def test_subnet_timestamps(self):
timestamp = datetime.utcnow().isoformat()
vn_obj = vnc_api.VirtualNetwork(self.id())
sn_id = str(uuid.uuid4())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(vnc_api.SubnetType('1.1.1.0', 28),
subnet_uuid=sn_id, created=timestamp,
last_modified=timestamp)]))
self._vnc_lib.virtual_network_create(vn_obj)
sn_dict = self.read_resource('subnet', sn_id)
# verify created timestamp and updated timestamp are same
self.assertEqual(sn_dict['created_at'], sn_dict['updated_at'])
proj_id = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
sn_dict_2 = self.update_resource('subnet', sn_id, proj_id,
extra_res_fields={'name':
'test-subnet-timestamps'})
# verify created timestamp and updated timestamp are not same
self.assertIsNot(sn_dict_2['created_at'], sn_dict_2['updated_at'])
# end test_subnet_timestamps
def test_external_network_perms(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain',
'default-project'])
net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True})
self.create_resource('subnet', proj_obj.uuid, extra_res_fields={
'network_id': net_q['id'],
'cidr': '1.1.1.0/24',
'ip_version': 4,
})
net_obj = self._vnc_lib.virtual_network_read(net_q['fq_name'])
self.assertEqual(net_obj.perms2.global_access, PERMS_RX)
self.update_resource('network', net_q['id'], proj_obj.uuid,
extra_res_fields={'router:external':False})
net_obj = self._vnc_lib.virtual_network_read(net_q['fq_name'])
self.assertEqual(net_obj.perms2.global_access, PERMS_NONE)
self.update_resource('network', net_q['id'], proj_obj.uuid,
extra_res_fields={'router:external':True})
net_obj = self._vnc_lib.virtual_network_read(net_q['fq_name'])
self.assertEqual(net_obj.perms2.global_access, PERMS_RX)
self.delete_resource('network', proj_obj.uuid, net_q['id'])
# end test_external_network_perms
def test_external_network_fip_pool(self):
proj_obj = self._vnc_lib.project_read(fq_name=['default-domain',
'default-project'])
net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True, 'shared': True})
self.create_resource('subnet', proj_obj.uuid, extra_res_fields={
'network_id': net_q['id'],
'cidr': '1.1.1.0/24',
'ip_version': 4,
})
fip_pool_fq_name = net_q['fq_name'] + ['floating-ip-pool']
fip_pool_obj = self._vnc_lib.floating_ip_pool_read(
fq_name=fip_pool_fq_name)
self.assertEqual(fip_pool_obj.perms2.global_access, PERMS_RWX)
self.delete_resource('network', proj_obj.uuid, net_q['id'])
net_q = self.create_resource('network', proj_obj.uuid,
extra_res_fields={'router:external': True, 'shared': False})
self.create_resource('subnet', proj_obj.uuid, extra_res_fields={
'network_id': net_q['id'],
'cidr': '1.1.1.0/24',
'ip_version': 4,
})
fip_pool_obj = self._vnc_lib.floating_ip_pool_read(
fq_name=fip_pool_fq_name)
self.assertEqual(fip_pool_obj.perms2.global_access, PERMS_RWX)
self.delete_resource('network', proj_obj.uuid, net_q['id'])
# end test_external_network_fip_pool
def test_list_router_gw_interfaces_with_not_owned_public_network(self):
# Admin project
admin_proj_id = str(uuid.uuid4())
admin_proj_name = 'admin-proj-%s' % self.id()
test_case.get_keystone_client().tenants.add_tenant(admin_proj_id,
admin_proj_name)
admin_proj_obj = self._vnc_lib.project_read(id=admin_proj_id)
# Classic project
proj_id = str(uuid.uuid4())
proj_name = 'proj-%s' % self.id()
test_case.get_keystone_client().tenants.add_tenant(proj_id, proj_name)
proj_obj = self._vnc_lib.project_read(id=proj_id)
# public network/subnet on an admin project
public_net = self.create_resource(
'network',
admin_proj_obj.uuid,
extra_res_fields={
'name': 'public-%s' % self.id(),
'router:external': True,
},
)
self.create_resource(
'subnet', admin_proj_obj.uuid,
extra_res_fields={
'name': 'public-%s' % self.id(),
'network_id': public_net['id'],
'cidr': '80.0.0.0/24',
'ip_version': 4,
},
)
public_net_obj = self._vnc_lib.virtual_network_read(
id=public_net['id'])
# private network/subnet on classic project
private_net = self.create_resource(
'network',
proj_obj.uuid,
extra_res_fields={'name': 'private-%s' % self.id()})
private_subnet = self.create_resource(
'subnet', proj_obj.uuid,
extra_res_fields={
'name': 'private-%s' % self.id(),
'network_id': private_net['id'],
'cidr': '10.0.0.0/24',
'ip_version': 4,
},
)
# Router on classic project with a gateway on the public network
# (owned by the admin project)
router = self.create_resource(
'router',
proj_obj.uuid,
extra_res_fields={
'name': 'router-%s' % self.id(),
'external_gateway_info': {
'network_id': public_net['id'],
}
},
)
# Add router interface on the private network/subnet
self.add_router_interface(
router['id'],
proj_obj.uuid,
is_admin=True,
extra_res_fields={'subnet_id': private_subnet['id']})
router_obj = self._vnc_lib.logical_router_read(id=router['id'])
# Create fake gw VMI
fake_gw = vnc_api.VirtualMachineInterface(
'fake-gw-interface-%s' % self.id(), proj_obj)
fake_gw.add_virtual_network(public_net_obj)
fake_gw_id = self._vnc_lib.virtual_machine_interface_create(fake_gw)
fake_gw = self._vnc_lib.virtual_machine_interface_read(id=fake_gw_id)
mock.patch.object(fake_gw, 'get_instance_ip_back_refs',
return_value='fake_iip').start()
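# The patches below make the router appear to have a service instance
# (presumably its SNAT instance) whose VM owns the fake gateway VMI, so
# listing the router's ports can return the gateway interface in addition
# to the internal interface added above.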
def router_with_fake_si_ref(orig_method, *args, **kwargs):
if 'obj_uuids' in kwargs and kwargs['obj_uuids'] == [router['id']]:
mock_router_vn_ref = mock.patch.object(
router_obj, 'get_virtual_network_refs',).start()
mock_router_vn_ref.return_value = [{
'uuid': public_net_obj.uuid}]
mock_router_si_ref = mock.patch.object(
router_obj, 'get_service_instance_refs',).start()
mock_router_si_ref.return_value = [{'uuid': 'fake_si_uuid'}]
return [router_obj]
return orig_method(*args, **kwargs)
def fake_si_obj(orig_method, *args, **kwargs):
if 'id' in kwargs and kwargs['id'] == 'fake_si_uuid':
si = mock.Mock()
si.get_virtual_machine_back_refs.return_value = \
[{'to': 'fake_vm_name', 'uuid': 'fake_vm_uuid'}]
return si
return orig_method(*args, **kwargs)
def return_router_gw_interface(orig_method, *args, **kwargs):
if ('back_ref_id' in kwargs and
kwargs['back_ref_id'] == ['fake_vm_uuid']):
return [fake_gw]
return orig_method(*args, **kwargs)
with test_common.patch(self.neutron_db_obj._vnc_lib,
'logical_routers_list',
router_with_fake_si_ref), \
test_common.patch(self.neutron_db_obj._vnc_lib,
'service_instance_read', fake_si_obj), \
test_common.patch(self.neutron_db_obj._vnc_lib,
'virtual_machine_interfaces_list',
return_router_gw_interface):
# list with a user that does not own the router gw port's network
router_interfaces = self.list_resource(
'port',
proj_uuid=proj_obj.uuid,
req_filters={'device_id': [router['id']]},
)
self.assertEqual(len(router_interfaces), 1)
# list as admin, project does not matter
router_interfaces = self.list_resource(
'port',
proj_uuid=proj_obj.uuid,
req_filters={'device_id': [router['id']]},
is_admin=True,
)
self.assertEqual(len(router_interfaces), 2)
def test_list_router_gw_interfaces_with_owned_public_network(self):
# Admin project
admin_proj_id = str(uuid.uuid4())
admin_proj_name = 'admin-proj-%s' % self.id()
test_case.get_keystone_client().tenants.add_tenant(admin_proj_id,
admin_proj_name)
admin_proj_obj = self._vnc_lib.project_read(id=admin_proj_id)
# public network/subnet
public_net = self.create_resource(
'network',
admin_proj_obj.uuid,
extra_res_fields={
'name': 'public-%s' % self.id(),
'router:external': True},
)
self.create_resource(
'subnet',
admin_proj_obj.uuid,
extra_res_fields={
'name': 'public-%s' % self.id(),
'network_id': public_net['id'],
'cidr': '80.0.0.0/24',
'ip_version': 4,
},
)
public_net_obj = self._vnc_lib.virtual_network_read(
id=public_net['id'])
# private network/subnet
private_net = self.create_resource(
'network',
admin_proj_obj.uuid,
extra_res_fields={'name': 'private-%s' % self.id()})
private_subnet = self.create_resource(
'subnet',
admin_proj_obj.uuid,
extra_res_fields={
'name': 'private-%s' % self.id(),
'network_id': private_net['id'],
'cidr': '10.0.0.0/24',
'ip_version': 4,
},
)
# Router on admin project with a gateway on the public network
# (owned by the admin project)
router = self.create_resource(
'router',
admin_proj_obj.uuid,
extra_res_fields={
'name': 'router-%s' % self.id(),
'external_gateway_info': {
'network_id': public_net['id'],
}
},
)
# Add router interface on the private network/subnet
self.add_router_interface(
router['id'],
admin_proj_obj.uuid,
is_admin=True,
extra_res_fields={'subnet_id': private_subnet['id']})
router_obj = self._vnc_lib.logical_router_read(id=router['id'])
# Create fake gw VMI
fake_gw = vnc_api.VirtualMachineInterface(
'fake-gw-interface-%s' % self.id(), admin_proj_obj)
fake_gw.add_virtual_network(public_net_obj)
fake_gw_id = self._vnc_lib.virtual_machine_interface_create(fake_gw)
fake_gw = self._vnc_lib.virtual_machine_interface_read(id=fake_gw_id)
mock.patch.object(fake_gw, 'get_instance_ip_back_refs',
return_value='fake_iip').start()
def router_with_fake_si_ref(orig_method, *args, **kwargs):
if 'obj_uuids' in kwargs and kwargs['obj_uuids'] == [router['id']]:
mock_router_vn_ref = mock.patch.object(
router_obj, 'get_virtual_network_refs',).start()
mock_router_vn_ref.return_value = [{
'uuid': public_net_obj.uuid}]
mock_router_si_ref = mock.patch.object(
router_obj, 'get_service_instance_refs',).start()
mock_router_si_ref.return_value = [{'uuid': 'fake_si_uuid'}]
return [router_obj]
return orig_method(*args, **kwargs)
def fake_si_obj(orig_method, *args, **kwargs):
if 'id' in kwargs and kwargs['id'] == 'fake_si_uuid':
si = mock.Mock()
si.get_virtual_machine_back_refs.return_value = \
[{'to': 'fake_vm_name', 'uuid': 'fake_vm_uuid'}]
return si
return orig_method(*args, **kwargs)
def return_router_gw_interface(orig_method, *args, **kwargs):
if ('back_ref_id' in kwargs and
kwargs['back_ref_id'] == ['fake_vm_uuid']):
return [fake_gw]
return orig_method(*args, **kwargs)
with test_common.patch(self.neutron_db_obj._vnc_lib,
'logical_routers_list',
router_with_fake_si_ref), \
test_common.patch(self.neutron_db_obj._vnc_lib,
'service_instance_read', fake_si_obj), \
test_common.patch(self.neutron_db_obj._vnc_lib,
'virtual_machine_interfaces_list',
return_router_gw_interface):
# list as admin, project does not matter
router_interfaces = self.list_resource(
'port',
proj_uuid=admin_proj_obj.uuid,
req_filters={'device_id': [router['id']]},
is_admin=True,
)
self.assertEqual(len(router_interfaces), 2)
# list as user owner of the router gw port's network
router_interfaces = self.list_resource(
'port',
proj_uuid=admin_proj_obj.uuid,
req_filters={'device_id': [router['id']]},
is_admin=False,
)
self.assertEqual(len(router_interfaces), 2)
def test_fip_owner(self):
admin_proj_obj = vnc_api.Project('admin-proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(admin_proj_obj)
admin_proj_id = admin_proj_obj.uuid
# external network
net_q = self.create_resource('network', admin_proj_id,
extra_res_fields={'router:external':True,
'shared': True})
subnet_q = self.create_resource('subnet', admin_proj_id,
extra_res_fields=
{'network_id': net_q['id'],
'cidr': '10.2.0.0/24',
'ip_version': 4})
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
proj_id = proj_obj.uuid
pvt_net_q = self.create_resource(
'network', proj_id,
extra_res_fields={'port_security_enabled': True},
)
pvt_subnet_q = self.create_resource('subnet', proj_id,
extra_res_fields=
{'network_id': pvt_net_q['id'],
'cidr': '20.1.0.0/24',
'ip_version': 4})
sg_q = self.create_resource('security_group', proj_id)
port_q = self.create_resource('port', proj_id,
extra_res_fields=
{'network_id':
pvt_subnet_q['network_id'],
'security_groups': [sg_q['id']]})
fip_q = self.create_resource('floatingip', proj_id,
extra_res_fields=
{'floating_network_id': net_q['id'],
'port_id': port_q['id']})
fip = self._vnc_lib.floating_ip_read(id=fip_q['id'])
self.assertEqual(fip.perms2.owner, proj_id)
def test_update_any_other_fields_in_fip_doesnt_disassociate(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
proj_id = proj_obj.uuid
# external network
net_q = self.create_resource('network', proj_id,
extra_res_fields={'router:external':True})
subnet_q = self.create_resource('subnet', proj_id,
extra_res_fields=
{'network_id': net_q['id'],
'cidr': '10.2.0.0/24',
'ip_version': 4})
# private network
pvt_net_q = self.create_resource(
'network', proj_id,
extra_res_fields={'port_security_enabled': True},
)
pvt_subnet_q = self.create_resource('subnet', proj_id,
extra_res_fields=
{'network_id': pvt_net_q['id'],
'cidr': '20.1.0.0/24',
'ip_version': 4})
sg_q = self.create_resource('security_group', proj_id)
port_q = self.create_resource('port', proj_id,
extra_res_fields=
{'network_id':
pvt_subnet_q['network_id'],
'security_groups': [sg_q['id']]})
fip_q = self.create_resource('floatingip', proj_id,
extra_res_fields=
{'floating_network_id': net_q['id'],
'port_id': port_q['id']})
# Updating description
self.update_resource('floatingip', fip_q['id'], proj_id,
extra_res_fields={'description':'test-fip'})
fip_dict = self.read_resource('floatingip', fip_q['id'])
self.assertEqual(fip_dict['port_id'], port_q['id'])
# Disassociate fip from port
self.update_resource('floatingip', fip_q['id'], proj_id,
extra_res_fields={'port_id': []})
fip_dict = self.read_resource('floatingip', fip_q['id'])
self.assertEqual(fip_dict['port_id'], None)
# Associate fip to port
self.update_resource('floatingip', fip_q['id'], proj_id,
extra_res_fields={'port_id': port_q['id']})
fip_dict = self.read_resource('floatingip', fip_q['id'])
self.assertEqual(fip_dict['port_id'], port_q['id'])
# cleanup
self.delete_resource('floatingip', proj_id, fip_q['id'])
self.delete_resource('port', proj_id, port_q['id'])
self.delete_resource('subnet', proj_id, subnet_q['id'])
self.delete_resource('subnet', proj_id, pvt_subnet_q['id'])
self.delete_resource('security_group', proj_id, sg_q['id'])
self.delete_resource('network', proj_id, net_q['id'])
self.delete_resource('network', proj_id, pvt_net_q['id'])
# end test_update_any_other_fields_in_fip_doesnt_disassociate
def test_dpdk_compute_port_bindings(self):
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('192.168.10.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("dpdk-host")
vr_obj.set_virtual_router_dpdk_enabled(True)
vnc_vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
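# Creating a port bound to a DPDK-enabled vrouter host is expected to
# return a vhostuser VIF: binding:vif_type 'vhostuser' with
# vhostuser_mode 'server', vhostuser_vrouter_plug set and a
# vhostuser_socket path in binding:vif_details.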
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:host_id': 'dpdk-host'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
self.assertNotEqual(
port_dict['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_vrouter_plug'),
True)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_mode'),
"server")
self.assertEqual(
port_dict['binding:vif_type'], 'vhostuser')
# disable dpdk on compute and we should observe the vif
# details deleted from port bindings
vr_obj.set_virtual_router_dpdk_enabled(False)
vnc_vr_obj = self._vnc_lib.virtual_router_update(vr_obj)
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict1 = json.loads(resp.text)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_vrouter_plug'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_mode'),
None)
self.assertEqual(
port_dict1['binding:vif_type'], 'vrouter')
self.assertNotEqual(port_dict, port_dict1)
# end test_dpdk_compute_port_bindings
def test_non_dpdk_compute_port_bindings(self):
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('192.168.11.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("non-dpdk-host")
vnc_vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:host_id': 'non-dpdk-host'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_vrouter_plug'),
None)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_mode'),
None)
self.assertEqual(
port_dict['binding:vif_type'], 'vrouter')
# Enable dpdk on the compute and we should observe the vhostuser vif
# details added to the port bindings
vr_obj.set_virtual_router_dpdk_enabled(True)
vnc_vr_obj = self._vnc_lib.virtual_router_update(vr_obj)
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict1 = json.loads(resp.text)
self.assertNotEqual(
port_dict1['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_vrouter_plug'),
True)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_mode'),
"server")
self.assertEqual(
port_dict1['binding:vif_type'], 'vhostuser')
self.assertNotEqual(port_dict, port_dict1)
# end test_non_dpdk_compute_port_bindings
def test_dpdk_compute_port_bindings_with_long_vrouter_name(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
vn_obj = vnc_api.VirtualNetwork(self.id(),proj_obj)
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('192.168.11.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("dpdk-long.foobar")
vr_obj.set_virtual_router_dpdk_enabled(True)
vnc_vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'proj-%s' %(self.id())])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:host_id': 'dpdk-long'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
self.assertNotEqual(
port_dict['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_vrouter_plug'),
True)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_mode'),
"server")
self.assertEqual(
port_dict['binding:vif_type'], 'vhostuser')
# disable dpdk on compute and we should observe the vif
# details deleted from port bindings
vr_obj.set_virtual_router_dpdk_enabled(False)
vnc_vr_obj = self._vnc_lib.virtual_router_update(vr_obj)
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict1 = json.loads(resp.text)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_vrouter_plug'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_mode'),
None)
self.assertEqual(
port_dict1['binding:vif_type'], 'vrouter')
self.assertNotEqual(port_dict, port_dict1)
# end test_dpdk_compute_port_bindings_with_long_vrouter_name
def test_dpdk_compute_port_bindings_with_split_hostid(self):
vn_obj = vnc_api.VirtualNetwork(self.id())
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(
vnc_api.SubnetType('192.168.10.0', 24))]))
self._vnc_lib.virtual_network_create(vn_obj)
vr_obj = vnc_api.VirtualRouter("dpdk-host.foo")
vr_obj.set_virtual_router_dpdk_enabled(True)
vnc_vr_obj = self._vnc_lib.virtual_router_create(vr_obj)
sg_obj = vnc_api.SecurityGroup('default')
try:
self._vnc_lib.security_group_create(sg_obj)
except vnc_api.RefsExistError:
pass
proj_uuid = self._vnc_lib.fq_name_to_id('project',
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': True,
'roles': ''}
data = {'resource':{'network_id': vn_obj.uuid,
'tenant_id': proj_uuid,
'binding:host_id': 'dpdk-host.foo'}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict = json.loads(resp.text)
self.assertNotEqual(
port_dict['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_vrouter_plug'),
True)
self.assertEqual(
port_dict['binding:vif_details'].get('vhostuser_mode'),
"server")
self.assertEqual(
port_dict['binding:vif_type'], 'vhostuser')
# disable dpdk on compute and we should observe the vif
# details deleted from port bindings
vr_obj.set_virtual_router_dpdk_enabled(False)
vnc_vr_obj = self._vnc_lib.virtual_router_update(vr_obj)
resp = self._api_svr_app.post_json('/neutron/port', body)
port_dict1 = json.loads(resp.text)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_socket'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_vrouter_plug'),
None)
self.assertEqual(
port_dict1['binding:vif_details'].get('vhostuser_mode'),
None)
self.assertEqual(
port_dict1['binding:vif_type'], 'vrouter')
self.assertNotEqual(port_dict, port_dict1)
# end test_dpdk_compute_port_bindings_with_split_hostid
# end class TestBasic
class TestExtraFieldsPresenceByKnob(test_case.NeutronBackendTestCase):
@classmethod
def setUpClass(cls):
super(TestExtraFieldsPresenceByKnob, cls).setUpClass(
extra_config_knobs=[('NEUTRON', 'contrail_extensions_enabled', True)])
def test_extra_fields_on_network(self):
test_obj = self._create_test_object()
context = {'operation': 'READ',
'user_id': '',
'roles': ''}
data = {'fields': None,
'id': test_obj.uuid}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/network', body)
net_dict = json.loads(resp.text)
self.assertIn('fq_name', net_dict)
# end test_extra_fields_on_network
# end class TestExtraFieldsPresenceByKnob
class TestExtraFieldsAbsenceByKnob(test_case.NeutronBackendTestCase):
@classmethod
def setUpClass(cls):
super(TestExtraFieldsAbsenceByKnob, cls).setUpClass(
extra_config_knobs=[('NEUTRON', 'contrail_extensions_enabled', False)])
def test_no_extra_fields_on_network(self):
test_obj = self._create_test_object()
context = {'operation': 'READ',
'user_id': '',
'roles': ''}
data = {'fields': None,
'id': test_obj.uuid}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json('/neutron/network', body)
net_dict = json.loads(resp.text)
self.assertNotIn('fq_name', net_dict)
# end test_extra_fields_on_network
# end class TestExtraFieldsAbsenceByKnob
class TestListWithFilters(test_case.NeutronBackendTestCase):
def test_filters_with_id(self):
# sg setup
proj_obj = self._vnc_lib.project_read(
fq_name=['default-domain', 'default-project'])
sg_obj = vnc_api.SecurityGroup('sg-%s' %(self.id()), proj_obj)
self._vnc_lib.security_group_create(sg_obj)
# fip setup
vn_obj = vnc_api.VirtualNetwork('vn1-%s' %(self.id()), proj_obj)
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(vnc_api.SubnetType('1.1.1.0', 28))]))
self._vnc_lib.virtual_network_create(vn_obj)
fip_pool_obj = vnc_api.FloatingIpPool('fip-pool-%s' %(self.id()),
vn_obj)
self._vnc_lib.floating_ip_pool_create(fip_pool_obj)
fip1_obj = vnc_api.FloatingIp('fip1-%s' %(self.id()), fip_pool_obj)
fip1_obj.add_project(proj_obj)
self._vnc_lib.floating_ip_create(fip1_obj)
proj2_obj = vnc_api.Project('proj2-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj2_obj)
fip2_obj = vnc_api.FloatingIp('fip2-%s' %(self.id()), fip_pool_obj)
fip2_obj.add_project(proj2_obj)
self._vnc_lib.floating_ip_create(fip2_obj)
vmi_obj = vnc_api.VirtualMachineInterface(
'vmi-%s' %(self.id()), proj_obj)
vmi_obj.add_virtual_network(vn_obj)
self._vnc_lib.virtual_machine_interface_create(vmi_obj)
fip3_obj = vnc_api.FloatingIp('fip3-%s' %(self.id()), fip_pool_obj)
fip3_obj.add_virtual_machine_interface(vmi_obj)
fip3_obj.add_project(proj_obj)
self._vnc_lib.floating_ip_create(fip3_obj)
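# spy_list checks that the ids requested in the Neutron filter are pushed
# down to the VNC list call as obj_uuids rather than filtered client-side.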
def spy_list(orig_method, *args, **kwargs):
self.assertIn(sg_obj.uuid, kwargs['obj_uuids'])
return orig_method(*args, **kwargs)
with test_common.patch(
self.neutron_db_obj._vnc_lib, 'security_groups_list', spy_list):
context = {'operation': 'READALL',
'user_id': '',
'tenant_id': proj_obj.uuid,
'roles': '',
'is_admin': 'False'}
data = {'filters': {'id':[sg_obj.uuid]}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json(
'/neutron/security_group', body)
sg_neutron_list = json.loads(resp.text)
self.assertEqual(len(sg_neutron_list), 1)
self.assertEqual(sg_neutron_list[0]['id'], sg_obj.uuid)
def spy_list(orig_method, *args, **kwargs):
self.assertIn(fip1_obj.uuid, kwargs['obj_uuids'])
return orig_method(*args, **kwargs)
with test_common.patch(
self.neutron_db_obj._vnc_lib, 'floating_ips_list', spy_list):
context = {'operation': 'READALL',
'user_id': '',
'tenant_id': '',
'roles': '',
'is_admin': 'False'}
# ask for one explicit id
data = {'filters': {'id':[fip1_obj.uuid]}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json(
'/neutron/floatingip', body)
fip_neutron_list = json.loads(resp.text)
self.assertEqual(len(fip_neutron_list), 1)
self.assertEqual(fip_neutron_list[0]['id'], fip1_obj.uuid)
# ask for list of ids AND in a project, should return 1
data = {'filters': {'id':[fip1_obj.uuid, fip2_obj.uuid],
'tenant_id': [proj2_obj.uuid.replace('-', '')]}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json(
'/neutron/floatingip', body)
fip_neutron_list = json.loads(resp.text)
self.assertEqual(len(fip_neutron_list), 1)
self.assertEqual(fip_neutron_list[0]['id'], fip2_obj.uuid)
# ask for list of ids AND on a VMI, should return 1
data = {'filters': {'id':[fip1_obj.uuid, fip2_obj.uuid,
fip3_obj.uuid],
'tenant_id': [proj2_obj.uuid.replace('-', '')]}}
body = {'context': context, 'data': data}
resp = self._api_svr_app.post_json(
'/neutron/floatingip', body)
fip_neutron_list = json.loads(resp.text)
self.assertEqual(len(fip_neutron_list), 1)
self.assertEqual(fip_neutron_list[0]['id'], fip2_obj.uuid)
self._vnc_lib.security_group_delete(id=sg_obj.uuid)
self._vnc_lib.floating_ip_delete(id=fip1_obj.uuid)
self._vnc_lib.floating_ip_delete(id=fip2_obj.uuid)
self._vnc_lib.floating_ip_delete(id=fip3_obj.uuid)
self._vnc_lib.floating_ip_pool_delete(id=fip_pool_obj.uuid)
self._vnc_lib.virtual_machine_interface_delete(id=vmi_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
self._vnc_lib.project_delete(id=proj2_obj.uuid)
# end test_filters_with_id
def test_filters_with_shared_and_router_external(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
vn1_obj = vnc_api.VirtualNetwork('vn1-%s' %(self.id()), proj_obj)
vn1_obj.set_is_shared(False)
self._vnc_lib.virtual_network_create(vn1_obj)
vn2_obj = vnc_api.VirtualNetwork('vn2-%s' %(self.id()), proj_obj)
vn2_obj.set_router_external(False)
self._vnc_lib.virtual_network_create(vn2_obj)
vn3_obj = vnc_api.VirtualNetwork('vn3-%s' %(self.id()), proj_obj)
vn3_obj.set_is_shared(False)
vn3_obj.set_router_external(True)
self._vnc_lib.virtual_network_create(vn3_obj)
subnet1_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={'network_id': vn1_obj.uuid,
'cidr': '10.0.0.0/24',
'ip_version': 4})
subnet2_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={'network_id': vn2_obj.uuid,
'cidr': '10.1.0.0/24',
'ip_version': 4})
subnet3_q = self.create_resource('subnet', proj_obj.uuid,
extra_res_fields={'network_id': vn3_obj.uuid,
'cidr': '10.2.0.0/24',
'ip_version': 4})
# filter for shared='False' networks/subnets should return 2
vn1_neutron_list = self.list_resource(
'network', proj_uuid=proj_obj.uuid,
req_filters={'shared': [False]})
vn1_3_subnet_list = self.list_resource(
'subnet', proj_uuid=proj_obj.uuid,
req_filters={'shared': [False]})
self.assertEqual(len(vn1_neutron_list), 2)
self.assertEqual(len(vn1_3_subnet_list), 2)
vn_ids = []
vn_ids.append(vn1_neutron_list[0]['id'])
vn_ids.append(vn1_neutron_list[1]['id'])
self.assertIn(vn1_obj.uuid, vn_ids)
self.assertIn(vn3_obj.uuid, vn_ids)
# filter for router:external='False' networks/subnets should return 1
vn2_neutron_list = self.list_resource(
'network', proj_uuid=proj_obj.uuid,
req_filters={'router:external': [False]})
self.assertEqual(len(vn2_neutron_list), 1)
vn2_subnet_list = self.list_resource(
'subnet', proj_uuid=proj_obj.uuid,
req_filters={'router:external': [False]})
self.assertEqual(len(vn2_subnet_list), 1)
self.assertEqual(vn2_neutron_list[0]['id'], vn2_obj.uuid)
# filter for router:external='True' AND
# shared='False' networks should return 1
vn3_neutron_list = self.list_resource(
'network', proj_uuid=proj_obj.uuid,
req_filters={'shared': [False],
'router:external': [True]})
self.assertEqual(len(vn3_neutron_list), 1)
self.assertEqual(vn3_neutron_list[0]['id'], vn3_obj.uuid)
# cleanup
self.delete_resource('subnet', proj_obj.uuid, subnet1_q['id'])
self.delete_resource('subnet', proj_obj.uuid, subnet2_q['id'])
self.delete_resource('subnet', proj_obj.uuid, subnet3_q['id'])
self._vnc_lib.virtual_network_delete(id=vn1_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn2_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn3_obj.uuid)
# end test_filters_with_shared_and_router_external
def test_filters_with_complex_type(self):
proj_obj = vnc_api.Project('proj-%s' %(self.id()), vnc_api.Domain())
self._vnc_lib.project_create(proj_obj)
vn_obj = vnc_api.VirtualNetwork('vn1-%s' %(self.id()), proj_obj)
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType(
[vnc_api.IpamSubnetType(vnc_api.SubnetType('1.1.1.0', 28))]))
self._vnc_lib.virtual_network_create(vn_obj)
mac = vnc_api.MacAddressesType(mac_address=['00:01:00:00:0f:3c'])
vmi_obj = vnc_api.VirtualMachineInterface(
'vmi-%s' %(self.id()), proj_obj,
virtual_machine_interface_mac_addresses=mac)
vmi_obj.set_virtual_network(vn_obj)
self._vnc_lib.virtual_machine_interface_create(vmi_obj)
vmi2_obj = vnc_api.VirtualMachineInterface(
'vmi2-%s' %(self.id()), proj_obj)
vmi2_obj.set_virtual_network(vn_obj)
self._vnc_lib.virtual_machine_interface_create(vmi2_obj)
# creating a port with mac address that already exist should fail
# port_create will do port_list with filter on mac address
try:
port_dict = self.create_resource('port', proj_obj.uuid,
extra_res_fields={
'name':'vmi3-%s' % self.id(),
'network_id': vn_obj.uuid,
'mac_address':
'00:01:00:00:0f:3c'
})
self.fail(
'Creating a port with an already existing mac address unexpectedly succeeded')
except webtest.app.AppError as e:
self.assertIsNot(re.search('Bad Request', str(e)), None)
self.assertIsNot(re.search('MacAddressInUse', str(e)), None)
# Cleanup
self._vnc_lib.virtual_machine_interface_delete(id=vmi_obj.uuid)
self._vnc_lib.virtual_machine_interface_delete(id=vmi2_obj.uuid)
self._vnc_lib.virtual_network_delete(id=vn_obj.uuid)
self._vnc_lib.project_delete(id=proj_obj.uuid)
# end test_filters_with_complex_type
# end class TestListWithFilters
class TestAuthenticatedAccess(test_case.NeutronBackendTestCase):
test_obj_uuid = None
test_failures = []
expected_auth_token = ''
@classmethod
def setUpClass(cls):
from keystonemiddleware import auth_token
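# minimal stand-in for keystonemiddleware's AuthProtocol: instead of talking to
# keystone it records whether the expected X-Auth-Token was forwarded by the api-server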
class FakeAuthProtocol(object):
_test_cls = cls
def __init__(self, app, *args, **kwargs):
self._app = app
# end __init__
def __call__(self, env, start_response):
# in multi-tenancy mode only the admin role is admitted
# by the api-server until full RBAC support is available
if (env['REQUEST_METHOD'] == 'GET' and
env['PATH_INFO'] == '/virtual-network/%s' %(
self._test_cls.test_obj_uuid)):
# always let the request through, but record any
# assertion failures in test_failures instead of raising
if 'HTTP_X_AUTH_TOKEN' not in env:
self._test_cls.test_failures.append(
'Missing HTTP_X_AUTH_TOKEN')
elif not env['HTTP_X_AUTH_TOKEN'].startswith(
self._test_cls.expected_auth_token):
self._test_cls.test_failures.append(
'Found wrong HTTP_X_AUTH_TOKEN %s' %(
env['HTTP_X_AUTH_TOKEN']))
env['HTTP_X_ROLE'] = 'admin'
return self._app(env, start_response)
else:
env['HTTP_X_ROLE'] = 'admin'
return self._app(env, start_response)
# end __call__
# end class FakeAuthProtocol
super(TestAuthenticatedAccess, cls).setUpClass(
extra_config_knobs=[
('DEFAULTS', 'auth', 'keystone'),
('DEFAULTS', 'multi_tenancy', True),
('KEYSTONE', 'admin_user', 'foo'),
('KEYSTONE', 'admin_password', 'bar'),
('KEYSTONE', 'admin_tenant_name', 'baz'),],
extra_mocks=[
(auth_token, 'AuthProtocol', FakeAuthProtocol),
])
# end setUpClass
def test_post_neutron_checks_auth_token(self):
test_obj = self._create_test_object()
TestAuthenticatedAccess.test_obj_uuid = test_obj.uuid
context = {'operation': 'READ',
'user_id': '',
'roles': ''}
data = {'fields': None,
'id': test_obj.uuid}
body = {'context': context, 'data': data}
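# with no X-Auth-Token supplied by the client, the api-server is expected to
# forward a placeholder token starting with 'no user token for'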
TestAuthenticatedAccess.expected_auth_token = 'no user token for'
self._api_svr_app.post_json('/neutron/network', body)
self.assertEqual(self.test_failures, [])
TestAuthenticatedAccess.expected_auth_token = 'abc123'
self._api_svr_app.post_json('/neutron/network', body,
headers={'X_AUTH_TOKEN':'abc123'})
self.assertEqual(self.test_failures, [])
# end test_post_neutron_checks_auth_token
# end class TestAuthenticatedAccess
class TestRBACPerms(test_case.VncOpenstackTestCase):
domain_name = 'default-domain'
fqdn = [domain_name]
_api_session = requests.Session()
adapter = requests.adapters.HTTPAdapter()
_api_session.mount("http://", adapter)
@classmethod
def setUpClass(cls):
from keystonemiddleware import auth_token
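# fake auth middleware: the 'test123' token maps to a plain 'test' role,
# anything else is treated as cloud-admin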
class FakeAuthProtocol(object):
_test_cls = cls
def __init__(self, app, *args, **kwargs):
self._app = app
# end __init__
def __call__(self, env, start_response):
# map the 'test123' token to a non-admin role; anything else gets cloud-admin
if env.get('HTTP_X_AUTH_TOKEN') == 'test123':
env['HTTP_X_ROLE'] = 'test'
else:
env['HTTP_X_ROLE'] = 'cloud-admin'
return self._app(env, start_response)
# end __call__
# end class FakeAuthProtocol
extra_mocks = [(auth_token, 'AuthProtocol',
FakeAuthProtocol)]
extra_config_knobs = [
('DEFAULTS', 'aaa_mode', 'rbac'),
('DEFAULTS', 'cloud_admin_role', 'cloud-admin'),
('DEFAULTS', 'global_read_only_role', 'read-only-role'),
('DEFAULTS', 'auth', 'keystone'),
]
super(TestRBACPerms, cls).setUpClass(extra_mocks=extra_mocks,
extra_config_knobs=extra_config_knobs)
def test_neutron_perms(self):
test_obj = self._create_test_object()
proj_obj = self._vnc_lib.project_read(
fq_name=['default-domain', 'default-project'])
context = {'operation': 'CREATE',
'user_id': '',
'is_admin': False,
'roles': '',
'tenant_id': proj_obj.uuid}
data = {'resource': {'name': 'test_network',
'tenant_id': proj_obj.uuid}}
body = {'context': context, 'data': data}
uri = '/neutron/network'
url = "http://%s:%s%s" \
% (self._vnc_lib._web_host, self._vnc_lib._web_port, uri)
headers = self._vnc_lib._headers
headers['X_AUTH_TOKEN'] = 'test123'
body = json.dumps(body)
val = TestRBACPerms._api_session.post(url, data=body, headers=headers, verify=False)
self.assertIn('NotAuthorized', val._content)
# end class TestRBACPerms
class TestKeystoneCallCount(test_case.NeutronBackendTestCase):
test_obj_uuid = None
test_failures = []
expected_auth_token = ''
_api_session = requests.Session()
adapter = requests.adapters.HTTPAdapter()
_api_session.mount("http://", adapter)
_call_count = 0
@classmethod
def setUpClass(cls):
from keystonemiddleware import auth_token
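# fake auth middleware that counts requests arriving with the 'abc123' token
# and grants the admin role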
class FakeAuthProtocol(object):
_test_cls = cls
def __init__(self, app, *args, **kwargs):
self._app = app
# end __init__
def __call__(self, env, start_response):
# count the number of calls carrying the expected token
if env.get('HTTP_X_AUTH_TOKEN') == 'abc123':
self._test_cls._call_count = self._test_cls._call_count + 1
env['HTTP_X_ROLE'] = 'admin'
return self._app(env, start_response)
# end __call__
# end class FakeAuthProtocol
super(TestKeystoneCallCount, cls).setUpClass(
extra_config_knobs=[
('DEFAULTS', 'auth', 'keystone'),
('DEFAULTS', 'aaa_mode', 'cloud-admin'),
('KEYSTONE', 'admin_user', 'foo'),
('KEYSTONE', 'admin_password', 'bar'),
('KEYSTONE', 'admin_tenant_name', 'baz'),],
extra_mocks=[
(auth_token, 'AuthProtocol', FakeAuthProtocol),
])
# end setUpClass
def test_keystone_call_count(self):
test_obj = self._create_test_object()
context = {'operation': 'READ',
'user_id': '',
'roles': ''}
data = {'fields': None,
'id': test_obj.uuid}
body = {'context': context, 'data': data}
uri = '/neutron/network'
url = "http://%s:%s%s" \
% (self._vnc_lib._web_host, self._vnc_lib._web_port, uri)
headers = self._vnc_lib._headers
headers['X_AUTH_TOKEN'] = 'abc123'
body = json.dumps(body)
# currently OP_GET goes through LocalVncApi, so exactly one keystone-authenticated call is expected for this request.
TestKeystoneCallCount._api_session.post(url, data=body, headers=headers, verify=False)
self.assertEqual(TestKeystoneCallCount._call_count, 1)
# end test_keystone_call_count
# end class TestKeystoneCallCount
| 46.117754
| 170
| 0.567324
| 14,838
| 126,501
| 4.480388
| 0.045828
| 0.023917
| 0.038658
| 0.019359
| 0.840072
| 0.796901
| 0.76235
| 0.735424
| 0.701143
| 0.671661
| 0
| 0.010923
| 0.322622
| 126,501
| 2,742
| 171
| 46.134573
| 0.764906
| 0.073343
| 0
| 0.668781
| 0
| 0
| 0.123133
| 0.014119
| 0
| 0
| 0
| 0
| 0.0657
| 1
| 0.031717
| false
| 0.006343
| 0.009515
| 0
| 0.062075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c74689bfde86f4255066cf8148ed7189c2984e5e
| 90
|
py
|
Python
|
incomevis/__init__.py
|
hieumtran/incomevis
|
90adca62803f767d7c96fc879e662d934dcf9123
|
[
"MIT"
] | null | null | null |
incomevis/__init__.py
|
hieumtran/incomevis
|
90adca62803f767d7c96fc879e662d934dcf9123
|
[
"MIT"
] | null | null | null |
incomevis/__init__.py
|
hieumtran/incomevis
|
90adca62803f767d7c96fc879e662d934dcf9123
|
[
"MIT"
] | null | null | null |
from .utils import *
from .processing import *
from .vis import *
from .analysis import *
| 18
| 25
| 0.733333
| 12
| 90
| 5.5
| 0.5
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 90
| 4
| 26
| 22.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c75906ac470aa4950f55241088287203a901f75a
| 22
|
py
|
Python
|
src/utoolbox/stitching/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | 2
|
2020-09-03T06:22:14.000Z
|
2020-10-04T10:14:56.000Z
|
src/utoolbox/stitching/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | null | null | null |
src/utoolbox/stitching/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | null | null | null |
from .sandbox import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c77f45821a4233808e053e22ac3cd304b51c4eea
| 47
|
py
|
Python
|
parampicker/__init__.py
|
khurramjaved96/experiment
|
831b5531baee2a268ffcf4b4f22942d75a4e5e26
|
[
"MIT"
] | 1
|
2019-04-17T16:29:00.000Z
|
2019-04-17T16:29:00.000Z
|
parampicker/__init__.py
|
Khurramjaved96/experiment
|
831b5531baee2a268ffcf4b4f22942d75a4e5e26
|
[
"MIT"
] | null | null | null |
parampicker/__init__.py
|
Khurramjaved96/experiment
|
831b5531baee2a268ffcf4b4f22942d75a4e5e26
|
[
"MIT"
] | null | null | null |
from parampicker.parampicker import ParamPicker
| 47
| 47
| 0.914894
| 5
| 47
| 8.6
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 1
| 47
| 47
| 0.977273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c79efd092d5aceb4a0e5530ed69dabe0066b925a
| 28
|
py
|
Python
|
search/__init__.py
|
joshuanazareth97/popviz
|
6d424e8523f8ee134fe6bf70113b6894d04f7375
|
[
"MIT"
] | null | null | null |
search/__init__.py
|
joshuanazareth97/popviz
|
6d424e8523f8ee134fe6bf70113b6894d04f7375
|
[
"MIT"
] | 2
|
2021-03-31T19:47:10.000Z
|
2021-08-23T20:42:46.000Z
|
search/__init__.py
|
joshuanazareth97/popviz
|
6d424e8523f8ee134fe6bf70113b6894d04f7375
|
[
"MIT"
] | null | null | null |
from .imdbsearcher import *
| 14
| 27
| 0.785714
| 3
| 28
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1bfacd7118d75a4c2e66d9ab7ab570eb9a16e9ef
| 3,832
|
py
|
Python
|
odin/tests/test_position.py
|
gsamarakoon/Odin
|
e2e9d638c68947d24f1260d35a3527dd84c2523f
|
[
"MIT"
] | 103
|
2017-01-14T19:38:14.000Z
|
2022-03-10T12:52:09.000Z
|
odin/tests/test_position.py
|
gsamarakoon/Odin
|
e2e9d638c68947d24f1260d35a3527dd84c2523f
|
[
"MIT"
] | 6
|
2017-01-19T01:38:53.000Z
|
2020-03-09T19:03:18.000Z
|
odin/tests/test_position.py
|
JamesBrofos/Odin
|
e2e9d638c68947d24f1260d35a3527dd84c2523f
|
[
"MIT"
] | 33
|
2017-02-05T21:51:17.000Z
|
2021-12-22T20:38:30.000Z
|
import unittest
import datetime as dt
from odin.handlers.position_handler.position import FilledPosition
from odin.utilities import params
class TestPosition(unittest.TestCase):
def test_to_database_position(self):
s = "SPY"
q = 100
d = params.Directions.long_dir
t = params.TradeTypes.buy_trade
a = params.action_dict[(d, t)]
pid = "test_portfolio_id"
date = dt.datetime.today()
price = 100.0
update_price = 101.0
pos = FilledPosition(s, d, t, pid, date, price)
pos.transact_shares(a, q, price)
pos.to_database_position()
def test_from_database_position(self):
s = "SPY"
pid = "test_portfolio_id"
pos = FilledPosition.from_database_position(pid, s)
self.assertEqual(pos.avg_price, 100.01)
self.assertEqual(pos.portfolio_id, pid)
self.assertEqual(pos.quantity, 100)
self.assertEqual(pos.direction, params.Directions.long_dir)
self.assertEqual(pos.trade_type, params.TradeTypes.buy_trade)
def test_long_position(self):
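# long scenario: buy 100 shares at 100.0, mark to 101.0, then sell two lots of 50 at 100.5 and 101.0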
s = "GOOG"
q = 100
d = params.Directions.long_dir
t = params.TradeTypes.buy_trade
a = params.action_dict[(d, t)]
pid = "test_portfolio_id"
date = dt.datetime.today()
price = 100.0
update_price = 101.0
pos = FilledPosition(s, d, t, pid, date, price)
pos.transact_shares(a, q, price)
pos.update_market_value(update_price)
self.assertEqual(
pos.percent_pnl,
1 + (pos.market_value - pos.cost_basis) / pos.cost_basis
)
self.assertEqual(pos.quantity, q)
self.assertEqual(pos.market_value, 10100.0)
self.assertEqual(pos.unrealized_pnl, 99.0)
self.assertEqual(pos.tot_commission, 1.0)
sell_price = 100.5
pos.transact_shares(params.Actions.sell, q // 2, sell_price)
self.assertEqual(pos.quantity, q // 2)
self.assertEqual(pos.realized_pnl, 48.0)
self.assertEqual(pos.unrealized_pnl, 24.5)
self.assertEqual(pos.tot_commission, 2.0)
sell_price = 101.0
pos.transact_shares(params.Actions.sell, q // 2, sell_price)
self.assertEqual(pos.quantity, 0)
self.assertEqual(pos.realized_pnl, 72.0)
self.assertEqual(pos.unrealized_pnl, 0.)
self.assertEqual(pos.tot_commission, 3.0)
def test_short_position(self):
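# short scenario: short 100 shares at 100.0, mark to 101.0, then cover two lots of 50 at 100.5 and 101.0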
s = "GOOG"
q = 100
d = params.Directions.short_dir
t = params.TradeTypes.buy_trade
a = params.action_dict[(d, t)]
pid = "test_portfolio_id"
date = dt.datetime.today()
price = 100.0
update_price = 101.0
pos = FilledPosition(s, d, t, pid, date, price)
pos.transact_shares(a, q, price)
pos.update_market_value(update_price)
self.assertEqual(
pos.percent_pnl,
1 - (pos.market_value - pos.cost_basis) / pos.cost_basis
)
self.assertEqual(pos.quantity, q)
self.assertEqual(pos.market_value, -10100.0)
self.assertEqual(pos.unrealized_pnl, -101.0)
self.assertEqual(pos.tot_commission, 1.0)
buy_price = 100.5
pos.transact_shares(params.Actions.buy, q // 2, buy_price)
self.assertEqual(pos.quantity, q // 2)
self.assertEqual(pos.realized_pnl, -52.0)
self.assertEqual(pos.unrealized_pnl, -25.5)
self.assertEqual(pos.tot_commission, 2.0)
buy_price = 101.0
pos.transact_shares(params.Actions.buy, q // 2, buy_price)
self.assertEqual(pos.quantity, 0)
self.assertEqual(pos.realized_pnl, -78.0)
self.assertEqual(pos.unrealized_pnl, 0.)
self.assertEqual(pos.tot_commission, 3.0)
if __name__ == "__main__":
unittest.main()
| 35.813084
| 69
| 0.631263
| 503
| 3,832
| 4.620278
| 0.16501
| 0.200086
| 0.240103
| 0.098107
| 0.774527
| 0.753873
| 0.726334
| 0.726334
| 0.652324
| 0.628657
| 0
| 0.041549
| 0.258873
| 3,832
| 106
| 70
| 36.150943
| 0.776761
| 0
| 0
| 0.610526
| 0
| 0
| 0.023493
| 0
| 0
| 0
| 0
| 0
| 0.326316
| 1
| 0.042105
| false
| 0
| 0.042105
| 0
| 0.094737
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
40146c46c44e73317db5ed917527c52380dad0dc
| 207
|
py
|
Python
|
posetools/backend/base_imagebackend.py
|
HashimHL/posetools
|
057f02dae0d8c78db521d2700de5a4fbe6fe024d
|
[
"MIT"
] | 1
|
2021-12-20T01:39:19.000Z
|
2021-12-20T01:39:19.000Z
|
posetools/backend/base_imagebackend.py
|
HashimHL/posetools
|
057f02dae0d8c78db521d2700de5a4fbe6fe024d
|
[
"MIT"
] | null | null | null |
posetools/backend/base_imagebackend.py
|
HashimHL/posetools
|
057f02dae0d8c78db521d2700de5a4fbe6fe024d
|
[
"MIT"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class BaseImageBackend(metaclass = ABCMeta):
@abstractmethod
def affine_transform(self, image, matrix, size = None):
pass
| 23
| 57
| 0.758454
| 23
| 207
| 6.782609
| 0.826087
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178744
| 207
| 9
| 58
| 23
| 0.917647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
40bfe7058af0b5d2ac45d203eb90bebad3f0c31e
| 97
|
py
|
Python
|
fancymail/__init__.py
|
maxbelovnano/django-fancymail
|
7ef0824ece9341c585f082fe6ce7bcb0bb91d215
|
[
"MIT"
] | 1
|
2017-06-12T10:41:01.000Z
|
2017-06-12T10:41:01.000Z
|
fancymail/__init__.py
|
maxbelovnano/django-fancymail
|
7ef0824ece9341c585f082fe6ce7bcb0bb91d215
|
[
"MIT"
] | 1
|
2016-03-23T01:31:09.000Z
|
2016-03-23T01:31:09.000Z
|
fancymail/__init__.py
|
maxbelovnano/django-fancymail
|
7ef0824ece9341c585f082fe6ce7bcb0bb91d215
|
[
"MIT"
] | 5
|
2015-04-12T16:10:35.000Z
|
2021-05-27T00:30:25.000Z
|
from fancymail.emailrelated import EmailMessageRelated
from fancymail.send import send_fancy_mail
| 48.5
| 54
| 0.907216
| 12
| 97
| 7.166667
| 0.666667
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072165
| 97
| 2
| 55
| 48.5
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
40e4ec982655d165f1d7d378d3f6a5ae84546f06
| 39
|
py
|
Python
|
fluent_python/__init__.py
|
kissf-lu/py36_pipenv
|
d324cdc0b286a7a1733e84a9064fae8750c41bb5
|
[
"MIT"
] | null | null | null |
fluent_python/__init__.py
|
kissf-lu/py36_pipenv
|
d324cdc0b286a7a1733e84a9064fae8750c41bb5
|
[
"MIT"
] | null | null | null |
fluent_python/__init__.py
|
kissf-lu/py36_pipenv
|
d324cdc0b286a7a1733e84a9064fae8750c41bb5
|
[
"MIT"
] | null | null | null |
from .coroutine import step_cell, Grid
| 19.5
| 38
| 0.820513
| 6
| 39
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 1
| 39
| 39
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
40f396c0c9d0f80d0cea3c79086e9f9e31538c21
| 26
|
py
|
Python
|
fanorona_aec/__init__.py
|
AbhijeetKrishnan/gym-fanorona
|
0f8ea25091a0861e35a56c26f01cd78a3e0a5fe9
|
[
"MIT"
] | 2
|
2021-04-24T23:30:35.000Z
|
2021-11-25T11:54:45.000Z
|
fanorona_aec/__init__.py
|
AbhijeetKrishnan/fanorona-aec
|
0f8ea25091a0861e35a56c26f01cd78a3e0a5fe9
|
[
"MIT"
] | 1
|
2021-04-22T21:03:16.000Z
|
2021-04-22T21:03:16.000Z
|
fanorona_aec/__init__.py
|
AbhijeetKrishnan/gym-fanorona
|
0f8ea25091a0861e35a56c26f01cd78a3e0a5fe9
|
[
"MIT"
] | null | null | null |
from . import fanorona_v0
| 13
| 25
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.153846
| 26
| 1
| 26
| 26
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc0fb4e4f7b842558ae6d600bb5f4b4a622f6bcf
| 39
|
py
|
Python
|
runp-heroku.py
|
superleader/chat2
|
b6c78ce4bfbba4081292e53efb094057fbdfe79a
|
[
"BSD-3-Clause"
] | 227
|
2020-05-08T13:40:55.000Z
|
2022-03-31T20:33:02.000Z
|
runp-heroku.py
|
wbonack/witty-name
|
07a4babed5b4ff4846c10268a65975c6c800de2b
|
[
"BSD-3-Clause"
] | 12
|
2019-03-26T21:42:00.000Z
|
2020-03-16T05:56:56.000Z
|
runp-heroku.py
|
wbonack/witty-name
|
07a4babed5b4ff4846c10268a65975c6c800de2b
|
[
"BSD-3-Clause"
] | 56
|
2020-05-16T13:24:21.000Z
|
2022-03-02T09:12:54.000Z
|
#!flask/bin/python
from app import app
| 13
| 19
| 0.769231
| 7
| 39
| 4.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 2
| 20
| 19.5
| 0.882353
| 0.435897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
905273ea566ab0a6223c7927a05414618fe1f12b
| 81
|
py
|
Python
|
WindowUtilFor360WebCam/__init__.py
|
Kazuhito00/WindowUtilFor360WebCam
|
8f238f64f8b9abc2073277d9d661ffd3378baa98
|
[
"Apache-2.0"
] | 2
|
2021-06-26T09:45:34.000Z
|
2021-06-26T09:52:13.000Z
|
WindowUtilFor360WebCam/__init__.py
|
Kazuhito00/WindowUtilFor360WebCam
|
8f238f64f8b9abc2073277d9d661ffd3378baa98
|
[
"Apache-2.0"
] | null | null | null |
WindowUtilFor360WebCam/__init__.py
|
Kazuhito00/WindowUtilFor360WebCam
|
8f238f64f8b9abc2073277d9d661ffd3378baa98
|
[
"Apache-2.0"
] | 1
|
2021-06-26T09:32:06.000Z
|
2021-06-26T09:32:06.000Z
|
from WindowUtilFor360WebCam.WindowUtilFor360WebCam import WindowUtilFor360WebCam
| 40.5
| 80
| 0.938272
| 5
| 81
| 15.2
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 0.049383
| 81
| 1
| 81
| 81
| 0.87013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
90533aa20fe5eccb0aef19b6c0bdd723730ac0d8
| 52
|
py
|
Python
|
pyqt_fitting_graphics_view/__init__.py
|
yjg30737/pyqt-fitting-graphics-view
|
dc1631cb9449d391ebabcd5976de3e7a210e488d
|
[
"MIT"
] | null | null | null |
pyqt_fitting_graphics_view/__init__.py
|
yjg30737/pyqt-fitting-graphics-view
|
dc1631cb9449d391ebabcd5976de3e7a210e488d
|
[
"MIT"
] | null | null | null |
pyqt_fitting_graphics_view/__init__.py
|
yjg30737/pyqt-fitting-graphics-view
|
dc1631cb9449d391ebabcd5976de3e7a210e488d
|
[
"MIT"
] | null | null | null |
from .fittingGraphicsView import FittingGraphicsView
| 52
| 52
| 0.923077
| 4
| 52
| 12
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 52
| 1
| 52
| 52
| 0.979592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
905ac9d2358857b35f0e692698e89243f645673a
| 30
|
py
|
Python
|
sdk/internal/__init__.py
|
shiksha360-site/site
|
5930d4a06961abaf15b17eecec51ca8e54d9a958
|
[
"MIT"
] | null | null | null |
sdk/internal/__init__.py
|
shiksha360-site/site
|
5930d4a06961abaf15b17eecec51ca8e54d9a958
|
[
"MIT"
] | null | null | null |
sdk/internal/__init__.py
|
shiksha360-site/site
|
5930d4a06961abaf15b17eecec51ca8e54d9a958
|
[
"MIT"
] | null | null | null |
from .internal_site import app
| 30
| 30
| 0.866667
| 5
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9082fe3604b6162145f8afc9989a91bf8a2e518a
| 42
|
py
|
Python
|
jetbrains-academy/Zookeeper/Problems/Alice in Wonderland/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | 5
|
2020-08-29T15:15:31.000Z
|
2022-03-01T18:22:34.000Z
|
jetbrains-academy/Zookeeper/Problems/Alice in Wonderland/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
jetbrains-academy/Zookeeper/Problems/Alice in Wonderland/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | 1
|
2020-12-02T11:13:14.000Z
|
2020-12-02T11:13:14.000Z
|
print("Alice's Adventures in Wonderland")
| 21
| 41
| 0.785714
| 6
| 42
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
9085036c90fb51a196ce9e2a5eeae9bde05b1676
| 4,821
|
py
|
Python
|
unit_tests/checks/test_author.py
|
brobeson/bibcheck
|
9b5c3ac1e61f33775d47b3ff43de60155eb7a0ed
|
[
"MIT"
] | null | null | null |
unit_tests/checks/test_author.py
|
brobeson/bibcheck
|
9b5c3ac1e61f33775d47b3ff43de60155eb7a0ed
|
[
"MIT"
] | 3
|
2020-02-09T16:30:01.000Z
|
2020-02-10T20:04:16.000Z
|
unit_tests/checks/test_author.py
|
brobeson/bibcheck
|
9b5c3ac1e61f33775d47b3ff43de60155eb7a0ed
|
[
"MIT"
] | null | null | null |
"""Unit test for the author initials check."""
import unittest
import bibcheck.checker
import bibcheck.checks.author
class IssuesTest(unittest.TestCase):
"""Test cases for the author issue classes."""
def test_intials_issue(self):
"""Test the InitialsIssue class."""
issue = bibcheck.checks.author.InitialsIssue("bibliography.bib", 34)
self.assertEqual(issue.file_path, "bibliography.bib")
self.assertEqual(issue.line_number, 34)
self.assertEqual(issue.message, "Author initials must be separated by a space.")
def test_hyphenated_issue(self):
"""Test the HyphenatedIssue class."""
issue = bibcheck.checks.author.HyphenatedIssue("bibliography.bib", 34)
self.assertEqual(issue.file_path, "bibliography.bib")
self.assertEqual(issue.line_number, 34)
self.assertEqual(
issue.message,
"Author names with hyphenated lowercase should use braces: Na-me -> Na{-me}",
)
class CheckTest(unittest.TestCase):
"""Test cases for the check() function."""
def __init__(self, methodName="runTest"):
super().__init__(methodName)
self.checker = bibcheck.checks.author.AuthorChecker()
def test_initials_ok(self):
"""Test author lines that do not have issues with initial spaces."""
lines = [
bibcheck.checker.Line("title={Hamlet},", "references.bib", 10),
bibcheck.checker.Line(
"author={Shakespeare, William},", "references.bib", 11
),
bibcheck.checker.Line("author={Shakespeare, W. A.},", "references.bib", 11),
bibcheck.checker.Line(
"author={Shakespeare, William and Bacon, Francis},",
"references.bib",
11,
),
bibcheck.checker.Line(
"title={Hamlet}, author={Shakespeare, William},", "references.bib", 10
),
]
for line in lines:
with self.subTest():
self.assertFalse(self.checker.check(line))
def test_initials_bad(self):
"""Test author lines that have issues with initial spaces."""
lines = [
bibcheck.checker.Line("author={Shakespeare, W.A.},", "references.bib", 11),
bibcheck.checker.Line(
"author={Shakespeare, W.A. and Bacon, Francis},", "references.bib", 11
),
bibcheck.checker.Line(
"author={Shakespeare, W. A. and Bacon, F.A.},", "references.bib", 11
),
bibcheck.checker.Line(
"title={Hamlet}, author={Shakespeare, W.A.},", "references.bib", 11
),
bibcheck.checker.Line(
"title={Hamlet}, author={Shakespeare, WA.},", "references.bib", 11
),
bibcheck.checker.Line(
"title={Hamlet}, author={Shakespeare, W.A},", "references.bib", 11
),
bibcheck.checker.Line(
"title={Hamlet}, author={Shakespeare, WA},", "references.bib", 11
),
]
for line in lines:
with self.subTest():
issues = self.checker.check(line)
self.assertEqual(len(issues), 1)
self.assertIsNotNone(issues[0])
self.assertEqual(issues[0].file_path, "references.bib")
self.assertEqual(issues[0].line_number, 11)
self.assertEqual(
issues[0].message, "Author initials must be separated by a space."
)
def test_hyphens_ok(self):
"""Test author lines that do not have issues with hyphenated names."""
lines = [
bibcheck.checker.Line(
"author={Shakespeare, William}", "references.bib", 11
),
bibcheck.checker.Line(
"author={Shakespeare, Wil-{liam}", "references.bib", 11
),
bibcheck.checker.Line(
"author={Shakespeare, Wil-Liam", "references.bib", 11
),
]
for line in lines:
with self.subTest():
self.assertFalse(self.checker.check(line))
def test_hyphens_bad(self):
"""Test author lines that have issues with hyphenated names."""
issues = self.checker.check(
bibcheck.checker.Line(
"author={Shakespeare, Wil-liam}", "references.bib", 11
)
)
self.assertEqual(len(issues), 1)
self.assertIsNotNone(issues[0])
self.assertEqual(issues[0].file_path, "references.bib")
self.assertEqual(issues[0].line_number, 11)
self.assertEqual(
issues[0].message,
"Author names with hyphenated lowercase should use braces: Na-me -> Na{-me}",
)
| 39.195122
| 89
| 0.565443
| 494
| 4,821
| 5.461538
| 0.176113
| 0.086731
| 0.112676
| 0.093773
| 0.829503
| 0.784655
| 0.757228
| 0.757228
| 0.757228
| 0.699036
| 0
| 0.016139
| 0.305953
| 4,821
| 122
| 90
| 39.516393
| 0.790197
| 0.087534
| 0
| 0.544554
| 0
| 0
| 0.26028
| 0
| 0
| 0
| 0
| 0
| 0.178218
| 1
| 0.069307
| false
| 0
| 0.029703
| 0
| 0.118812
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
909759615114a736b715439cc953bef2beb61f03
| 10,362
|
py
|
Python
|
tests/unit/publishers/test_datadog.py
|
eventbrite/pymetrics
|
af36ac04f2f0a86924fd7a5d9dff980d9030427f
|
[
"Apache-2.0"
] | 5
|
2019-10-15T12:42:48.000Z
|
2021-08-15T02:49:55.000Z
|
tests/unit/publishers/test_datadog.py
|
eventbrite/pymetrics
|
af36ac04f2f0a86924fd7a5d9dff980d9030427f
|
[
"Apache-2.0"
] | 1
|
2020-03-05T20:24:32.000Z
|
2020-03-06T14:47:47.000Z
|
tests/unit/publishers/test_datadog.py
|
eventbrite/pymetrics
|
af36ac04f2f0a86924fd7a5d9dff980d9030427f
|
[
"Apache-2.0"
] | 4
|
2020-04-13T23:42:29.000Z
|
2021-09-08T11:34:44.000Z
|
"""isort:skip_file"""
from __future__ import absolute_import # DO NOT import UNICODE LITERALS in this file!
from collections import OrderedDict
import pytest
from pymetrics.instruments import (
Counter,
Gauge,
Histogram,
Timer,
)
from pymetrics.publishers.datadog import DogStatsdPublisher
class TestDogStatsdPublisher(object):
def test_maximum_packet_size_constructor(self):
publisher = DogStatsdPublisher('127.0.0.1', 8125)
assert publisher.maximum_packet_size == 8000
publisher = DogStatsdPublisher('127.0.0.1', 8125, maximum_packet_size=1700)
assert publisher.maximum_packet_size == 1700
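# values above the 8000-byte ceiling fall back to the default maximum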
publisher = DogStatsdPublisher('127.0.0.1', 8125, maximum_packet_size=175000)
assert publisher.maximum_packet_size == 8000
def test_global_tags_invalid(self):
with pytest.raises(ValueError):
# noinspection PyTypeChecker
DogStatsdPublisher('localhost', '1234', global_tags=['this', 'is', 'invalid']) # type: ignore
def test_extra_gauge_tags_invalid(self):
with pytest.raises(ValueError):
# noinspection PyTypeChecker
DogStatsdPublisher('localhost', '1234', extra_gauge_tags=['this', 'is', 'invalid']) # type: ignore
def test_no_metrics_does_nothing(self):
publisher = DogStatsdPublisher('127.0.0.1', 8125)
assert publisher.get_formatted_metrics([]) == []
def test_no_metric_values_does_nothing(self):
publisher = DogStatsdPublisher('127.0.0.1', 8125)
assert publisher.get_formatted_metrics([Timer(u'hello')]) == []
def test_with_no_tags(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
histogram = Histogram('test.qux.histogram.1')
histogram.set(13)
publisher = DogStatsdPublisher('localhost', 1234)
assert (
publisher.get_formatted_metrics([counter, gauge, timer, histogram]) ==
[b'test.foo.timer.1:1|c', b'test.bar.gauge.1:5|g', b'test.baz.timer.1:2|ms', b'test.qux.histogram.1:13|h']
)
def test_with_one_global_tag_no_value(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
publisher = DogStatsdPublisher('localhost', 1234, global_tags={'blank_tag': None})
assert (
publisher.get_formatted_metrics([counter, gauge, timer]) ==
[
b'test.foo.timer.1:1|c|#blank_tag',
b'test.bar.gauge.1:5|g|#blank_tag',
b'test.baz.timer.1:2|ms|#blank_tag',
]
)
def test_with_one_global_tag_with_value(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
publisher = DogStatsdPublisher('localhost', 1234, global_tags={'integration': 'abc123'})
assert (
publisher.get_formatted_metrics([counter, gauge, timer]) ==
[
b'test.foo.timer.1:1|c|#integration:abc123',
b'test.bar.gauge.1:5|g|#integration:abc123',
b'test.baz.timer.1:2|ms|#integration:abc123',
]
)
def test_with_multiple_global_tags(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
publisher = DogStatsdPublisher(
'localhost',
1234,
global_tags=OrderedDict([('environment', 'qa'), ('acceptance', None), ('jenkins-build', 8293847)]),
)
assert (
publisher.get_formatted_metrics([counter, gauge, timer]) ==
[
b'test.foo.timer.1:1|c|#environment:qa,acceptance,jenkins-build:8293847',
b'test.bar.gauge.1:5|g|#environment:qa,acceptance,jenkins-build:8293847',
b'test.baz.timer.1:2|ms|#environment:qa,acceptance,jenkins-build:8293847',
]
)
def test_with_one_global_tag_and_one_extra_gauge_tag(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
publisher = DogStatsdPublisher(
'localhost',
1234,
global_tags={'integration': 'abc123'},
extra_gauge_tags={'worker': '456def'},
)
assert (
publisher.get_formatted_metrics([counter, gauge, timer]) ==
[
b'test.foo.timer.1:1|c|#integration:abc123',
b'test.bar.gauge.1:5|g|#integration:abc123,worker:456def',
b'test.baz.timer.1:2|ms|#integration:abc123',
]
)
def test_with_only_extra_gauge_tags(self):
counter = Counter('test.foo.timer.1')
counter.increment()
gauge = Gauge('test.bar.gauge.1')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2)
publisher = DogStatsdPublisher('localhost', 1234, extra_gauge_tags={'worker': '456def'})
assert (
publisher.get_formatted_metrics([counter, gauge, timer]) ==
[
b'test.foo.timer.1:1|c',
b'test.bar.gauge.1:5|g|#worker:456def',
b'test.baz.timer.1:2|ms',
]
)
def test_with_only_instrument_tags(self):
counter = Counter('test.foo.timer.1', hello='world')
counter.increment()
gauge = Gauge('test.bar.gauge.1', extra='data', nothing=None, mail='snail', guitar='electric')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2, number=5791)
publisher = DogStatsdPublisher('localhost', 1234)
metrics = publisher.get_formatted_metrics([counter, gauge, timer])
assert metrics[0] == b'test.foo.timer.1:1|c|#hello:world'
assert metrics[2] == b'test.baz.timer.1:2|ms|#number:5791'
assert metrics[1].startswith(b'test.bar.gauge.1:5|g|#')
assert b'extra:data' in metrics[1]
assert b'nothing' in metrics[1]
assert b'nothing:' not in metrics[1]
assert b'mail:snail' in metrics[1]
assert b'guitar:electric' in metrics[1]
def test_with_global_and_instrument_tags(self):
counter = Counter('test.foo.timer.1', hello='world')
counter.increment()
gauge = Gauge('test.bar.gauge.1', extra='data', nothing=None, mail='snail', guitar='electric')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2, number=5791.15, other_number=0)
publisher = DogStatsdPublisher(
'localhost',
1234,
global_tags=OrderedDict([('environment', 'qa'), ('acceptance', None), ('jenkins-build', 8293847)]),
extra_gauge_tags={'worker': '52'},
)
metrics = publisher.get_formatted_metrics([counter, gauge, timer], enable_meta_metrics=True)
assert metrics[0].startswith(b'pymetrics.meta.publish.statsd.format_metrics:')
assert metrics[0].endswith(b'|ms|#environment:qa,acceptance,jenkins-build:8293847')
assert metrics[1] == b'test.foo.timer.1:1|c|#environment:qa,acceptance,jenkins-build:8293847,hello:world'
assert metrics[2].startswith(
b'test.bar.gauge.1:5|g|#environment:qa,acceptance,jenkins-build:8293847,worker:52,'
)
assert b',extra:data' in metrics[2]
assert b',nothing' in metrics[2]
assert b',nothing:' not in metrics[2]
assert b',mail:snail' in metrics[2]
assert b',guitar:electric' in metrics[2]
assert metrics[3].startswith(b'test.baz.timer.1:2|ms|#environment:qa,acceptance,jenkins-build:8293847')
assert b',number:5791.15' in metrics[3]
assert b',other_number:0' in metrics[3]
def test_with_global_and_instrument_tags_and_distributions(self):
counter = Counter('test.foo.timer.1', hello='world')
counter.increment()
gauge = Gauge('test.bar.gauge.1', extra='data', nothing=None, mail='snail', guitar='electric')
gauge.set(5)
timer = Timer('test.baz.timer.1', initial_value=2, number=5791.15, other_number=0)
histogram = Histogram('test.qux.histogram.1', extra='data', nothing=None, mail='snail', guitar='electric')
histogram.set(91)
publisher = DogStatsdPublisher(
'localhost',
1234,
global_tags=OrderedDict([('environment', 'qa'), ('acceptance', None), ('jenkins-build', 8293847)]),
extra_gauge_tags={'worker': '52'},
use_distributions=True,
)
metrics = publisher.get_formatted_metrics([counter, gauge, timer, histogram], enable_meta_metrics=True)
assert metrics[0].startswith(b'pymetrics.meta.publish.statsd.format_metrics:')
assert metrics[0].endswith(b'|d|#environment:qa,acceptance,jenkins-build:8293847')
assert metrics[1] == b'test.foo.timer.1:1|c|#environment:qa,acceptance,jenkins-build:8293847,hello:world'
assert metrics[2].startswith(
b'test.bar.gauge.1:5|g|#environment:qa,acceptance,jenkins-build:8293847,worker:52,'
)
assert b',extra:data' in metrics[2]
assert b',nothing' in metrics[2]
assert b',nothing:' not in metrics[2]
assert b',mail:snail' in metrics[2]
assert b',guitar:electric' in metrics[2]
assert metrics[3].startswith(b'test.baz.timer.1:2|d|#environment:qa,acceptance,jenkins-build:8293847')
assert b',number:5791.15' in metrics[3]
assert b',other_number:0' in metrics[3]
assert metrics[4].startswith(
b'test.qux.histogram.1:91|d|#environment:qa,acceptance,jenkins-build:8293847,'
)
assert b',extra:data' in metrics[4]
assert b',nothing' in metrics[4]
assert b',nothing:' not in metrics[4]
assert b',mail:snail' in metrics[4]
assert b',guitar:electric' in metrics[4]
| 37.007143
| 118
| 0.618703
| 1,292
| 10,362
| 4.847523
| 0.098297
| 0.034488
| 0.034488
| 0.037362
| 0.886795
| 0.870509
| 0.797062
| 0.775188
| 0.723775
| 0.70206
| 0
| 0.057114
| 0.239626
| 10,362
| 279
| 119
| 37.139785
| 0.737784
| 0.013607
| 0
| 0.514423
| 0
| 0.048077
| 0.268534
| 0.144746
| 0
| 0
| 0
| 0
| 0.235577
| 1
| 0.067308
| false
| 0
| 0.024038
| 0
| 0.096154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
90b0006c6e28f489517254593e9fa6dd758c0981
| 33,633
|
py
|
Python
|
Stochastic_engine/CA_hydropower_daily_forecast.py
|
romulus97/HYDROWIRES
|
115e534764d8f58d64340d99cf6cb8eb6598c4ee
|
[
"MIT"
] | null | null | null |
Stochastic_engine/CA_hydropower_daily_forecast.py
|
romulus97/HYDROWIRES
|
115e534764d8f58d64340d99cf6cb8eb6598c4ee
|
[
"MIT"
] | null | null | null |
Stochastic_engine/CA_hydropower_daily_forecast.py
|
romulus97/HYDROWIRES
|
115e534764d8f58d64340d99cf6cb8eb6598c4ee
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon May 27 11:13:15 2019
@author: jkern
"""
from __future__ import division
import pandas as pd
import numpy as np
from datetime import datetime
import matplotlib.pyplot as plt
def hydro(sim_years):
#########################################################################
# The purpose of this script is to use synthetic streamflows at major California
# reservoir sites to simulate daily hydropower production for the PG&E and SCE
# zones of the California electricity market (CAISO), using parameters optimized
# via a differential evolution algorithm.
#########################################################################
# load California storage reservoir (ORCA) sites
df_sites = pd.read_excel('CA_hydropower/sites.xlsx',sheet_name = 'ORCA',header=0)
ORCA_sites = list(df_sites)
# load upper generation amounts for each predicted hydropower dam (PG&E and SCE)
upper_gen = pd.read_excel('CA_hydropower/upper.xlsx',header =0)
# month-day calender
calender = pd.read_excel('CA_hydropower/calender.xlsx',header=0)
# load simulated full natural flows at each California storage reservoir (ORCA site)
df_sim = pd.read_csv('Synthetic_streamflows/synthetic_streamflows_CA.csv',header=0)
df_sim = df_sim.loc[0:(sim_years+3)*365,:]
# load simulated outflows calculated by ORCA
df_ORCA = pd.read_csv('ORCA_output.csv')
outflow_sites = ['SHA_otf','ORO_otf','YRS_otf','FOL_otf','NML_otf','DNP_otf','EXC_otf','MIL_otf','ISB_otf','SUC_otf','KWH_otf','PFT_otf']
for i in range(0,len(df_ORCA)):
for s in outflow_sites:
df_sim.loc[i,s] = df_ORCA.loc[i,s]
sim_years = sim_years+3
#Add month and day columns to the dataframe
Month = []
Day = []
count = 0
for i in range(0,len(df_sim)):
if count < 365:
Month = np.append(Month,calender.loc[count,'Month'])
Day = np.append(Day,calender.loc[count,'Day'])
count = count + 1
else:
count = 0
Month = np.append(Month,calender.loc[count,'Month'])
Day = np.append(Day,calender.loc[count,'Day'])
count = count + 1
df_sim['Month']=Month
df_sim['Day']=Day
# calculate simulated totals
Sim_totals = []
for i in range(0,sim_years):
sample = df_sim.loc[i*365:i*365+365,'ORO_fnf':'ISB_fnf']
total = np.sum(np.sum(sample))
Sim_totals = np.append(Sim_totals,total)
# load historical full natural flows for 2001, 2005, 2010 and 2011
df_hist = pd.read_excel('CA_hydropower/hist_reservoir_inflows.xlsx',header=0)
Hist_totals = []
Hist_years = [2001,2005,2010,2011]
for i in Hist_years:
sample = df_hist[df_hist['year'] == i]
sample = sample.loc[:,'ORO_fnf':'ISB_fnf']
total = np.sum(np.sum(sample))
Hist_totals = np.append(Hist_totals,total)
# find most similar historical year for each simulated year
Rule_list=[]
for i in range(0,sim_years):
Difference=abs(Sim_totals[i]- Hist_totals)
#Select which rule to use
for n in range(0,len(Hist_years)):
if Difference[n]==np.min(Difference):
Rule=n
Rule_list.append(Rule)
# PGE hydro projects
PGE_names = pd.read_excel('CA_hydropower/sites.xlsx',sheet_name ='PGE',header=0)
PGE_dams = list(PGE_names.loc[:,'Balch 1':])
PGE_Storage=[PGE_dams[3],PGE_dams[7],PGE_dams[8],PGE_dams[9]]
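# dams listed in PGE_Storage are simulated with storage-based operating rules
# (refill/evacuation schedule) instead of the run-of-river logic used for the rest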
PGE_No_Data_Dams=[PGE_dams[2],PGE_dams[4],PGE_dams[10],PGE_dams[11],PGE_dams[15],PGE_dams[16],PGE_dams[17],PGE_dams[26],PGE_dams[30],PGE_dams[38],PGE_dams[39],PGE_dams[55],PGE_dams[60],PGE_dams[65]]
## SCE hydro projects
SCE_names = pd.read_excel('CA_hydropower/sites.xlsx',sheet_name ='SCE',header=0)
SCE_dams = list(SCE_names.loc[:,'Big_Creek_1 ':])
SCE_No_Data_Dams=[SCE_dams[7],SCE_dams[8],SCE_dams[12]]
#Simulate all the PGE inflow dams
check_unused = []
PGE_name_list = []
SCE_name_list = []
f_horizon = 7
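# length of the daily forecast horizon, in days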
for name in PGE_dams:
STOR = np.zeros((365*(sim_years),1))
for year in range(0,sim_years):
GEN = np.zeros((365,7))
if name in PGE_No_Data_Dams:
pass
elif name in PGE_Storage:
# which operating rule to use?
Rule=Rule_list[year]
File_name='CA_hydropower/PGE_Storage_FNF_V2/1.0_FNF_Storage_Rule_' + str(name) +'.txt'
Temp_Rule=pd.read_csv(File_name,delimiter=' ',header=None)
peak_flow,starting,ending,refill_1_date,evac_date,peak_end,refill_2_date,storage,power_cap,eff,min_power=Temp_Rule.loc[Rule][:]
k = str(PGE_names.loc[0][name])
I_O=str(PGE_names.loc[1][name])
#Which site to use
if k =='Oroville' and I_O =='Inflows':
site_name=['ORO_fnf']
elif k =='Oroville' and I_O =='Outflows':
site_name=['ORO_otf']
elif k =='Pine Flat' and I_O =='Inflows':
site_name=['PFT_fnf']
elif k =='Pine Flat' and I_O =='Outflows':
site_name=['PFT_otf']
elif k =='Shasta' and I_O =='Inflows':
site_name=['SHA_fnf']
elif k =='Shasta' and I_O =='Outflows':
site_name=['SHA_otf']
elif k =='New Melones' and I_O =='Inflows':
site_name=['NML_fnf']
elif k =='New Melones' and I_O =='Outflows':
site_name=['NML_otf']
elif k =='Pardee' and I_O =='Inflows':
site_name=['PAR_fnf']
elif k =='Pardee' and I_O =='Outflows':
site_name=['PAR_otf']
elif k =='New Exchequer' and I_O =='Inflows':
site_name=['EXC_fnf']
elif k =='New Exchequer' and I_O =='Outflows':
site_name=['EXC_otf']
elif k =='Folsom' and I_O =='Inflows':
site_name=['FOL_fnf']
elif k =='Folsom' and I_O =='Outflows':
site_name=['FOL_otf']
elif k =='Don Pedro' and I_O =='Inflows':
site_name=['DNP_fnf']
elif k =='Don Pedro' and I_O =='Outflows':
site_name=['DNP_otf']
elif k =='Millerton' and I_O =='Inflows':
site_name=['MIL_fnf']
elif k =='Millerton' and I_O =='Outflows':
site_name=['MIL_otf']
elif k =='Isabella' and I_O =='Inflows':
site_name=['ISB_fnf']
elif k =='Isabella' and I_O =='Outflows':
site_name=['ISB_otf']
elif k =='Yuba' and I_O =='Inflows':
site_name=['YRS_fnf']
elif k =='Yuba' and I_O =='Outflows':
site_name=['YRS_otf']
else:
None
flow_ts = df_sim.loc[:,site_name].values
# iterate through every day of the year
for day in range(0,365):
for fd in range(0,f_horizon):
s = day + fd
# first day of the forecast horizon (fd == 0)? take the beginning storage from the previous day
if day>0 and fd < 1:
storage = STOR[year*365+day-1]
elif day<1 and fd <1:
storage = 0
else:
pass
# available hydro production based on water availability
avail_power = flow_ts[year*365+day]*eff
# if it's during first refill
if s < refill_1_date:
gen =starting- ((starting-min_power)/refill_1_date)*s
storage = avail_power-gen
# if it maintains the water
elif s >= refill_1_date and s < evac_date:
gen=min_power
storage= storage + (avail_power- gen)
# if it's in evac period 2
elif s >= evac_date and s < peak_end:
gen= min_power+ ((power_cap-min_power)/(peak_end-evac_date)* (s- evac_date))
if gen > power_cap:
gen=power_cap
storage= storage + (avail_power- gen)
else:
storage= storage + (avail_power- gen)
# if it's in evac period 2
elif s >= peak_end and s < refill_2_date:
gen= power_cap
if gen > power_cap:
gen=power_cap
storage= storage + (avail_power- gen)
else:
storage= storage + (avail_power- gen)
elif s >=refill_2_date :
gen = power_cap-((power_cap-ending)/(365-refill_2_date)* (s-refill_2_date))
GEN[day,fd] = gen
if fd < 1:
STOR[year*365+day] = storage
else:
upper_now=upper_gen.loc[upper_gen.loc[:,'Name']== name]
upper_now=upper_now.reset_index(drop=True)
upper=upper_now.loc[0]['Max Gen']
Rule=Rule_list[year]
File_name='CA_hydropower/PGE_FNF_2/FNF_' + str(name) +'.txt'
Temp_Rule=pd.read_csv(File_name,delimiter=' ',header=None)
peak_flow,sum_cap,spr_cap,fall_cap,win_date,spr_date,sum_date,fall_date,eff,check_surplus=Temp_Rule.loc[Rule][:]
surplus = 0
transfer = 0
k = str(PGE_names.loc[0][name])
I_O=str(PGE_names.loc[1][name])
if k =='Oroville' and I_O =='Inflows':
site_name=['ORO_fnf']
elif k =='Oroville' and I_O =='Outflows':
site_name=['ORO_otf']
elif k =='Pine Flat' and I_O =='Inflows':
site_name=['PFT_fnf']
elif k =='Pine Flat' and I_O =='Outflows':
site_name=['PFT_otf']
elif k =='Shasta' and I_O =='Inflows':
site_name=['SHA_fnf']
elif k =='Shasta' and I_O =='Outflows':
site_name=['SHA_otf']
elif k =='New Melones' and I_O =='Inflows':
site_name=['NML_fnf']
elif k =='New Melones' and I_O =='Outflows':
site_name=['NML_otf']
elif k =='Pardee' and I_O =='Inflows':
site_name=['PAR_fnf']
elif k =='Pardee' and I_O =='Outflows':
site_name=['PAR_otf']
elif k =='New Exchequer' and I_O =='Inflows':
site_name=['EXC_fnf']
elif k =='New Exchequer' and I_O =='Outflows':
site_name=['EXC_otf']
elif k =='Folsom' and I_O =='Inflows':
site_name=['FOL_fnf']
elif k =='Folsom' and I_O =='Outflows':
site_name=['FOL_otf']
elif k =='Don Pedro' and I_O =='Inflows':
site_name=['DNP_fnf']
elif k =='Don Pedro' and I_O =='Outflows':
site_name=['DNP_otf']
elif k =='Millerton' and I_O =='Inflows':
site_name=['MIL_fnf']
elif k =='Millerton' and I_O =='Outflows':
site_name=['MIL_otf']
elif k =='Isabella' and I_O =='Inflows':
site_name=['ISB_fnf']
elif k =='Isabella' and I_O =='Outflows':
site_name=['ISB_otf']
elif k =='Yuba' and I_O =='Inflows':
site_name=['YRS_fnf']
elif k =='Yuba' and I_O =='Outflows':
site_name=['YRS_otf']
else:
None
flow_ts = df_sim.loc[:,site_name].values
annual = flow_ts[year*365:year*365+365]
max_flow = np.max(annual[105:260])
L = list(annual)
peak_flow = L.index(max_flow)
for day in range(0,365):
for fd in range(0,f_horizon):
s = day + fd
# first day of the forecast horizon (fd == 0)? take the beginning surplus from the previous day
if day>0 and fd < 1:
surplus = STOR[year*365+day-1]
elif day<1 and fd <1:
surplus = 0
else:
pass
# available hydro production based on water availability
avail_power = flow_ts[year*365+day]*eff
# if it's still winter, operate as RoR
if s < peak_flow - win_date:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
# if it's spring, operate as RoR with upper limit
elif s >= peak_flow - win_date and s < peak_flow - spr_date:
if avail_power > spr_cap:
surplus = surplus + (avail_power - spr_cap)
gen = spr_cap
elif avail_power <= spr_cap:
deficit = spr_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
# if it's summer, operate as RoR with upper limit
elif s >= peak_flow - spr_date and s < peak_flow + sum_date:
if avail_power > sum_cap:
surplus = surplus + (avail_power - sum_cap)
gen = sum_cap
elif avail_power <= sum_cap:
deficit = sum_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
# if it's fall, operate as RoR with upper limit
elif s >= peak_flow + sum_date and s < peak_flow + fall_date:
if avail_power > fall_cap:
surplus = surplus + (avail_power - fall_cap)
gen = fall_cap
elif avail_power <= fall_cap:
deficit = fall_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
elif s >= peak_flow + fall_date:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
else:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
GEN[day,fd] = gen
if fd < 1:
STOR[year*365+day] = surplus
# unused=surplus
# check_unused.append(surplus)
# rest_surplus=sum(check_unused)
if year < 1:
A_PGE = GEN
else:
A_PGE = np.vstack((A_PGE,GEN))
if name in PGE_No_Data_Dams:
pass
else:
PGE_name_list = np.append(PGE_name_list,name)
name_index = PGE_dams.index(name)
if name_index < 1:
M_PGE = A_PGE
else:
M_PGE = np.dstack((M_PGE,A_PGE))
for i in range(0,len(PGE_name_list)):
name = PGE_name_list[i]
filename = 'CA_hydropower/' + name + '_hydro.csv'
gen = M_PGE[:,:,i]
df_gen = pd.DataFrame(gen)
df_gen.columns = ['fd1','fd2','fd3','fd4','fd5','fd6','fd7']
df_gen.to_csv(filename)
##Simulate all the SCE inflow dams
for name in SCE_dams:
STOR = np.zeros((365*sim_years,1))
for year in range(0,sim_years):
GEN = np.zeros((365,7))
if name in SCE_No_Data_Dams:
pass
else:
Rule=Rule_list[year]
File_name='CA_hydropower/SCE_FNF_V2/SCE_fnf_' + str(name) +'.txt'
Temp_Rule=pd.read_csv(File_name,delimiter=' ',header=None)
peak_flow,sum_cap,spr_cap,fall_cap,win_date,spr_date,sum_date,fall_date,eff,check_surplus=Temp_Rule.loc[Rule][:]
surplus = 0
transfer = 0
k = str(SCE_names.loc[0][name])
I_O=str(SCE_names.loc[1][name])
if k =='Oroville' and I_O =='Inflows':
site_name=['ORO_fnf']
elif k =='Oroville' and I_O =='Outflows':
site_name=['ORO_otf']
elif k =='Pine Flat' and I_O =='Inflows':
site_name=['PFT_fnf']
elif k =='Pine Flat' and I_O =='Outflows':
site_name=['PFT_otf']
elif k =='Shasta' and I_O =='Inflows':
site_name=['SHA_fnf']
elif k =='Shasta' and I_O =='Outflows':
site_name=['SHA_otf']
elif k =='New Melones' and I_O =='Inflows':
site_name=['NML_fnf']
elif k =='New Melones' and I_O =='Outflows':
site_name=['NML_otf']
elif k =='Pardee' and I_O =='Inflows':
site_name=['PAR_fnf']
elif k =='Pardee' and I_O =='Outflows':
site_name=['PAR_otf']
elif k =='New Exchequer' and I_O =='Inflows':
site_name=['EXC_fnf']
elif k =='New Exchequer' and I_O =='Outflows':
site_name=['EXC_otf']
elif k =='Folsom' and I_O =='Inflows':
site_name=['FOL_fnf']
elif k =='Folsom' and I_O =='Outflows':
site_name=['FOL_otf']
elif k =='Don Pedro' and I_O =='Inflows':
site_name=['DNP_fnf']
elif k =='Don Pedro' and I_O =='Outflows':
site_name=['DNP_otf']
elif k =='Millerton' and I_O =='Inflows':
site_name=['MIL_fnf']
elif k =='Millerton' and I_O =='Outflows':
site_name=['MIL_otf']
elif k =='Isabella' and I_O =='Inflows':
site_name=['ISB_fnf']
elif k =='Isabella' and I_O =='Outflows':
site_name=['ISB_otf']
elif k =='Yuba' and I_O =='Inflows':
site_name=['YRS_fnf']
elif k =='Yuba' and I_O =='Outflows':
site_name=['YRS_otf']
else:
None
flow_ts = df_sim.loc[:,site_name].values
annual = flow_ts[year*365:year*365+365]
max_flow = np.max(annual[105:260])
L = list(annual)
peak_flow = L.index(max_flow)
# iterate through every day of the year
for day in range(0,365):
for fd in range(0,f_horizon):
s = day + fd
# first day of the forecast horizon (fd == 0)? take the beginning surplus from the previous day
if day>1 and fd < 1:
surplus = STOR[year*365+day-1]
elif day<1 and fd <1:
surplus = 0
else:
pass
# available hydro production based on water availability
avail_power = flow_ts[year*365+day]*eff
# if it's still winter, operate as RoR
if s < peak_flow - win_date:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
# if it's spring, operate as RoR with upper limit
elif s >= peak_flow - win_date and s < peak_flow - spr_date:
if avail_power > spr_cap:
surplus = surplus + (avail_power - spr_cap)
gen = spr_cap
elif avail_power <= spr_cap:
deficit = spr_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
# if it's summer, operate as RoR with upper limit
elif s >= peak_flow - spr_date and s < peak_flow + sum_date:
if avail_power > sum_cap:
surplus = surplus + (avail_power - sum_cap)
gen = sum_cap
elif avail_power <= sum_cap:
deficit = sum_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
# if it's fall, operate as RoR with upper limit
elif s >= peak_flow + sum_date and s < peak_flow + fall_date:
if avail_power > fall_cap:
surplus = surplus + (avail_power - fall_cap)
gen = fall_cap
elif avail_power <= fall_cap:
deficit = fall_cap - avail_power
if surplus>0:
transfer = np.min((surplus,deficit))
surplus = surplus - transfer
else:
transfer = 0
gen = avail_power + transfer
elif s >= peak_flow + fall_date:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
else:
if avail_power >= upper:
gen = upper
surplus = surplus + (avail_power - upper)
else:
gen = avail_power
GEN[day,fd] = gen
if fd < 1:
STOR[year*365+day] = surplus
if year < 1:
A_SCE = GEN
else:
A_SCE = np.vstack((A_SCE,GEN))
if name in SCE_No_Data_Dams:
pass
else:
SCE_name_list = np.append(SCE_name_list,name)
name_index = SCE_dams.index(name)
if name_index < 1:
M_SCE = A_SCE
else:
M_SCE = np.dstack((M_SCE,A_SCE))
for i in range(0,len(SCE_name_list)):
name = SCE_name_list[i]
filename = 'CA_hydropower/' + name + '_hydro.csv'
gen = M_SCE[:,:,i]
df_gen = pd.DataFrame(gen)
df_gen.columns = ['fd1','fd2','fd3','fd4','fd5','fd6','fd7']
df_gen.to_csv(filename)
PGE_total=np.sum(M_PGE,axis=2)
SCE_total=np.sum(M_SCE,axis=2)
# more maximum generation constraints
for i in range(0,len(PGE_total)):
for fd in range(0,f_horizon):
PGE_total[i,fd] = np.min((PGE_total[i,fd],851000/7))
SCE_total[i,fd] = np.min((SCE_total[i,fd],153000/7))
# Cut first year and last two years
PGE_total = PGE_total[365:-730]
SCE_total = SCE_total[365:-730]
df_PGE = pd.DataFrame(PGE_total)
df_PGE.columns = ['fd1','fd2','fd3','fd4','fd5','fd6','fd7']
df_SCE = pd.DataFrame(SCE_total)
df_SCE.columns = ['fd1','fd2','fd3','fd4','fd5','fd6','fd7']
df_PGE.to_csv('CA_hydropower/PGE_valley_hydro.csv')
df_SCE.to_csv('CA_hydropower/SCE_hydro.csv')
# #Forecast analysis
# TG = PGE_total
# differences=np.zeros((len(PGE_total)-f_horizon+1,f_horizon))
# for i in range(0,len(PGE_total)-f_horizon+1):
# differences[i,:] = (TG[i,:] - TG[i:i+f_horizon,0])/1000
#
# month_ID = np.zeros(((sim_years-1)*365,1))
# for i in range(0,sim_years-1):
# month_ID[i*365+0:i*365+31] = 1
# month_ID[i*365+31:i*365+59]=2
# month_ID[i*365+59:i*365+90]=3
# month_ID[i*365+90:i*365+120]=4
# month_ID[i*365+120:i*365+151]=5
# month_ID[i*365+151:i*365+181]=6
# month_ID[i*365+181:i*365+212]=7
# month_ID[i*365+212:i*365+243]=8
# month_ID[i*365+243:i*365+273]=9
# month_ID[i*365+273:i*365+304]=10
# month_ID[i*365+304:i*365+334]=11
# month_ID[i*365+334:i*365+365]=12
#
# month_ID = month_ID[:-6]
#
# combined = np.column_stack((differences,month_ID))
# df_combined = pd.DataFrame(combined)
# df_combined.columns = ['1','2','3','4','5','6','7','Month']
#
# plt.figure()
#
# months = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# for i in range(0,12):
# plt.subplot(4,3,i+1)
#
# month_selection = df_combined.loc[df_combined['Month']==i+1,:]
#
# for j in range(0,len(month_selection)):
#
# plt.plot(month_selection.iloc[j,0:f_horizon])
#
# if i ==6:
# plt.ylabel('Difference (GWh)',fontweight='bold')
# if i == 10:
# plt.xlabel('Forecast Horizon (Days)',fontweight='bold')
# plt.title(months[i],fontweight='bold')
# plt.ylim([-120,120])
# plt.subplots_adjust(wspace=0.6,hspace=1.2)
#
# plt.savefig('PGE_perfect_foresight.png', dpi=2000)
#
#
# #Forecast analysis
# TG = SCE_total
# differences=np.zeros((len(SCE_total)-f_horizon+1,f_horizon))
# for i in range(0,len(SCE_total)-f_horizon+1):
# differences[i,:] = (TG[i,:] - TG[i:i+f_horizon,0])/1000
#
# month_ID = np.zeros(((sim_years-1)*365,1))
# for i in range(0,sim_years-1):
# month_ID[i*365+0:i*365+31] = 1
# month_ID[i*365+31:i*365+59]=2
# month_ID[i*365+59:i*365+90]=3
# month_ID[i*365+90:i*365+120]=4
# month_ID[i*365+120:i*365+151]=5
# month_ID[i*365+151:i*365+181]=6
# month_ID[i*365+181:i*365+212]=7
# month_ID[i*365+212:i*365+243]=8
# month_ID[i*365+243:i*365+273]=9
# month_ID[i*365+273:i*365+304]=10
# month_ID[i*365+304:i*365+334]=11
# month_ID[i*365+334:i*365+365]=12
#
# month_ID = month_ID[:-6]
#
# combined = np.column_stack((differences,month_ID))
# df_combined = pd.DataFrame(combined)
# df_combined.columns = ['1','2','3','4','5','6','7','Month']
#
# plt.figure()
#
# months = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# for i in range(0,12):
# plt.subplot(4,3,i+1)
#
# month_selection = df_combined.loc[df_combined['Month']==i+1,:]
#
# for j in range(0,len(month_selection)):
#
# plt.plot(month_selection.iloc[j,0:f_horizon])
#
# if i ==6:
# plt.ylabel('Difference (GWh)',fontweight='bold')
# if i == 10:
# plt.xlabel('Forecast Horizon (Days)',fontweight='bold')
# plt.title(months[i],fontweight='bold')
# plt.ylim([-25,25])
# plt.subplots_adjust(wspace=0.6,hspace=1.2)
#
# plt.savefig('SCE_perfect_foresight.png', dpi=2000)
return None
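# A small restating sketch (added here; not part of the original script) that names the seasonal
# windows used by the dispatch loop above. win_date, spr_date, sum_date and fall_date are the
# day offsets around peak_flow defined earlier in this script.
def season_of(s, peak_flow, win_date, spr_date, sum_date, fall_date):
    if s < peak_flow - win_date:
        return 'winter'        # plain run-of-river, capped only at the plant's upper limit
    elif s < peak_flow - spr_date:
        return 'spring'        # run-of-river capped at spr_cap, excess flow stored as surplus
    elif s < peak_flow + sum_date:
        return 'summer'        # capped at sum_cap, stored surplus drawn down to cover deficits
    elif s < peak_flow + fall_date:
        return 'fall'          # capped at fall_cap, surplus still usable
    else:
        return 'late fall'     # back to plain run-of-river until the end of the year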
| 43.06402
| 202
| 0.413879
| 3,582
| 33,633
| 3.676159
| 0.098548
| 0.01048
| 0.025061
| 0.030073
| 0.754025
| 0.734964
| 0.732154
| 0.717117
| 0.70527
| 0.68788
| 0
| 0.041922
| 0.481551
| 33,633
| 780
| 203
| 43.119231
| 0.713253
| 0.161746
| 0
| 0.767296
| 0
| 0
| 0.079703
| 0.013983
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002096
| false
| 0.014675
| 0.010482
| 0
| 0.014675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
90f68218f79b9893f94b729cef57c284c3d07a71
| 192
|
py
|
Python
|
medium/sum_nums.py
|
SeulYoung/leetcode
|
a3d7c0d025b20f1bba3daddd7681e2a211b83d90
|
[
"BSD-3-Clause"
] | null | null | null |
medium/sum_nums.py
|
SeulYoung/leetcode
|
a3d7c0d025b20f1bba3daddd7681e2a211b83d90
|
[
"BSD-3-Clause"
] | null | null | null |
medium/sum_nums.py
|
SeulYoung/leetcode
|
a3d7c0d025b20f1bba3daddd7681e2a211b83d90
|
[
"BSD-3-Clause"
] | null | null | null |
def sumNums(n: int) -> int:
return sum(range(1, n + 1))
def sumNums(n: int) -> int:
# In Python, if an `and` expression is truthy overall it returns the value of its last operand; if an `or` expression is truthy it returns the first truthy operand's value
return n and n + sumNums(n - 1)
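# A small check added as an illustration (not part of the original file): once n reaches 0,
# `0 and ...` short-circuits to 0 without recursing further, which terminates the recursion.
assert sumNums(4) == 4 + 3 + 2 + 1  # 10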
| 24
| 65
| 0.661458
| 27
| 192
| 4.703704
| 0.481481
| 0.188976
| 0.173228
| 0.220472
| 0.267717
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.203125
| 192
| 7
| 66
| 27.428571
| 0.810458
| 0.307292
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
90f6d602f07713c4d41f996eeb01ebaeceab1b91
| 5,160
|
py
|
Python
|
test_logger.py
|
rzzzwilson/logger
|
854a21b8fa1ac488a9b1de57bd65bc6298250bd4
|
[
"MIT"
] | null | null | null |
test_logger.py
|
rzzzwilson/logger
|
854a21b8fa1ac488a9b1de57bd65bc6298250bd4
|
[
"MIT"
] | 1
|
2015-11-29T07:03:45.000Z
|
2015-11-29T07:03:45.000Z
|
test_logger.py
|
rzzzwilson/logger
|
854a21b8fa1ac488a9b1de57bd65bc6298250bd4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Test the simple logger.
"""
import os
import unittest
import logger
class TestLog(unittest.TestCase):
def testSimple(self):
"""A simple 'smoke test' for the logging module."""
import logger
logfilename = 'xyzzy.log'
# start logging, write some test logs, close log
log = logger.Log(logfilename, logger.Log.DEBUG)
log('test')
log.debug('DEBUG: test')
log.info('INFO: test')
log('WARN: test', logger.Log.WARN)
log.error('ERROR: test')
log.critical('CRITICAL: test')
del log
# check contents of the logfile
with open(logfilename, 'r') as fd:
lines = fd.readlines()
# drop first three lines and get last field of remaining lines
last_field = []
for l in lines[3:]:
end_field = l.split('|')[-1]
last_field.append(end_field)
expected = [
'Logging level set to 10 (DEBUG)\n',
'test\n',
'DEBUG: test\n',
'INFO: test\n',
'WARN: test\n',
'ERROR: test\n',
'CRITICAL: test\n',
]
msg = ('Got error comparing last fields, expected:\n%s\ngot:\n%s'
% (''.join(expected), ''.join(last_field)))
self.assertEqual(expected, last_field, msg)
os.remove(logfilename)
def testLevel(self):
"""A test setting the debug level up."""
import logger
logfilename = 'xyzzy2.log'
# start logging at WARN, write some test logs, close log
log = logger.Log(logfilename, logger.Log.WARN)
log('test')
log.debug('DEBUG: test')
log.info('INFO: test')
log.warn('WARN: test')
log.error('ERROR: test')
log.critical('CRITICAL: test')
del log
# check contents of the logfile
with open(logfilename, 'r') as fd:
lines = fd.readlines()
# get last field of each line (ignore first 3 lines)
last_field = []
for l in lines[3:]:
end_field = l.split('|')[-1]
last_field.append(end_field)
# build the expected output
expected = [
'Logging level set to 30 (WARN)\n',
'test\n',
'WARN: test\n',
'ERROR: test\n',
'CRITICAL: test\n'
]
msg = ('Got error comparing last fields, expected:\n%s\ngot:\n%s'
% (''.join(expected), ''.join(last_field)))
self.assertEqual(expected, last_field, msg)
os.remove(logfilename)
def testNolevel(self):
"""A test where the level is not specified."""
import logger
logfilename = 'xyzzy3.log'
# start logging, write some test logs, close log
log = logger.Log(logfilename)
log('test')
log.debug('DEBUG: test')
log.info('INFO: test')
log.warn('WARN: test')
log.error('ERROR: test')
log.critical('CRITICAL: test')
del log
# check contents of the logfile
with open(logfilename, 'r') as fd:
lines = fd.readlines()
# drop first three lines and get last field of remaining lines
last_field = []
for l in lines[3:]:
end_field = l.split('|')[-1]
last_field.append(end_field)
expected = [
'Logging level set to 00 (NOTSET)\n',
'test\n',
'DEBUG: test\n',
'INFO: test\n',
'WARN: test\n',
'ERROR: test\n',
'CRITICAL: test\n',
]
msg = ('Got error comparing last fields, expected:\n%s\ngot:\n%s'
% (''.join(expected), ''.join(last_field)))
self.assertEqual(expected, last_field, msg)
os.remove(logfilename)
def testMultiLine(self):
"""A test where a multi-line log is made."""
import logger
logfilename = 'xyzzy4.log'
# start logging, write some test logs, close log
log = logger.Log(logfilename)
log('test')
log('first line\nsecond line\nthird line')
del log
# check contents of the logfile
with open(logfilename, 'r') as fd:
lines = fd.readlines()
# drop first three lines and get last field of remaining lines
last_field = []
for l in lines[3:]:
end_field = l.split('|')[-1]
last_field.append(end_field)
expected = [
'Logging level set to 00 (NOTSET)\n',
'test\n',
'first line\n',
'second line\n',
'third line\n'
]
msg = ('Got error comparing last fields, expected:\n%s\ngot:\n%s'
% (''.join(expected), ''.join(last_field)))
self.assertEqual(expected, last_field, msg)
os.remove(logfilename)
unittest.main()
| 28.351648
| 73
| 0.505039
| 588
| 5,160
| 4.391156
| 0.178571
| 0.069713
| 0.035631
| 0.026336
| 0.773431
| 0.763749
| 0.763749
| 0.763749
| 0.763749
| 0.763749
| 0
| 0.006192
| 0.374031
| 5,160
| 181
| 74
| 28.508287
| 0.793189
| 0.151357
| 0
| 0.777778
| 0
| 0
| 0.197556
| 0.021208
| 0
| 0
| 0
| 0
| 0.034188
| 1
| 0.034188
| false
| 0
| 0.059829
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
90f98f14b83d5aa8ed3d85484a4e3a1f3bbc00ce
| 176
|
py
|
Python
|
app/transform/utils.py
|
n1EzeR/reviews_parser
|
d264dca1ebf434d6d82873222b451e3a867f3ad3
|
[
"CC0-1.0"
] | null | null | null |
app/transform/utils.py
|
n1EzeR/reviews_parser
|
d264dca1ebf434d6d82873222b451e3a867f3ad3
|
[
"CC0-1.0"
] | 1
|
2020-04-07T07:02:03.000Z
|
2020-04-07T07:02:03.000Z
|
app/transform/utils.py
|
n1EzeR/reviews_parser
|
d264dca1ebf434d6d82873222b451e3a867f3ad3
|
[
"CC0-1.0"
] | 2
|
2020-05-03T13:53:31.000Z
|
2020-05-28T23:35:34.000Z
|
from app.utils import get_latest_date_in_dir
def get_localized_latest_subdirectory(directory):
localized = f"../{directory}"
return get_latest_date_in_dir(localized)
| 25.142857
| 49
| 0.801136
| 25
| 176
| 5.2
| 0.6
| 0.138462
| 0.2
| 0.230769
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119318
| 176
| 6
| 50
| 29.333333
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0.079545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
29045b01efeb237bc7af780a2888932cd423ebb1
| 64
|
py
|
Python
|
pypy/lib/marshal.py
|
camillobruni/pygirl
|
ddbd442d53061d6ff4af831c1eab153bcc771b5a
|
[
"MIT"
] | 12
|
2016-01-06T07:10:28.000Z
|
2021-05-13T23:02:02.000Z
|
pypy/lib/marshal.py
|
camillobruni/pygirl
|
ddbd442d53061d6ff4af831c1eab153bcc771b5a
|
[
"MIT"
] | null | null | null |
pypy/lib/marshal.py
|
camillobruni/pygirl
|
ddbd442d53061d6ff4af831c1eab153bcc771b5a
|
[
"MIT"
] | 2
|
2016-07-29T07:09:50.000Z
|
2016-10-16T08:50:26.000Z
|
# temporary
from _marshal import __doc__
from _marshal import *
| 16
| 28
| 0.8125
| 8
| 64
| 5.75
| 0.625
| 0.478261
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 64
| 3
| 29
| 21.333333
| 0.851852
| 0.140625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
2919e448a051055590c67e04c408a2cf5c2b3d2a
| 8,468
|
py
|
Python
|
scripts/modules/patch_flair_parsing.py
|
therosko/Thesis-NER-in-English-novels
|
7988c3aa4f904e91b1e674090dbdc6487b4ad042
|
[
"Apache-2.0"
] | null | null | null |
scripts/modules/patch_flair_parsing.py
|
therosko/Thesis-NER-in-English-novels
|
7988c3aa4f904e91b1e674090dbdc6487b4ad042
|
[
"Apache-2.0"
] | null | null | null |
scripts/modules/patch_flair_parsing.py
|
therosko/Thesis-NER-in-English-novels
|
7988c3aa4f904e91b1e674090dbdc6487b4ad042
|
[
"Apache-2.0"
] | null | null | null |
##############################################################
# Fixing mismatches in the parsing of Flair output
# The script does not claim to be usable for any given text;
# instead it merely adapts the Flair output files to correspond
# to the entity split style of LitBank.
# Input: pandas dataframe, filename
# Output: pandas dataframe
##############################################################
import pandas as pd
def patch_flair(df, filename):
'''
The parsing has sometimes led to different tokens, which we fix in this step.
'''
list_to_drop = []
for i in df[df['original_word'].str.endswith('.’')==True].index.tolist():
fixed_words = [df.loc[i,'original_word'][:-2],".","’"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word'].str.endswith('.')==True].index.tolist():
if df.loc[i,'original_word'] not in ['.',"Mr.", "Mrs.", "St.", "Dr.","P.","M.","R.","P.S.",'...',"g.","p.","i."]:
fixed_words = [df.loc[i,'original_word'][:-1],"."]
df.loc[i,'original_word'] = fixed_words
elif df.loc[i-1,'original_word'] == '...' and df.loc[i+1,'original_word'] == "He":
list_to_drop.append(i)
else:
pass
for i in df[df['original_word'].str.endswith('’')==True].index.tolist():
if df.loc[i,'original_word'] != '’':
fixed_words = [df.loc[i,'original_word'][:-1],"’"]
df.loc[i,'original_word'] = fixed_words
else:
pass
for i in df[df['original_word'].str.endswith('--')==True].index.tolist():
if df.loc[i,'original_word'][:-2] in ["?","’",":","_","!",";",","] and df.loc[i,'original_word'] != "--":
fixed_words = [df.loc[i,'original_word'][:-2],"--"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word'].str.endswith("'")==True].index.tolist():
if df.loc[i,'original_word'] != "'":
fixed_words = [df.loc[i,'original_word'][:-1],"'"]
df.loc[i,'original_word'] = fixed_words
else:
pass
for i in df[df['original_word']==';”'].index.tolist():
fixed_words = [';','”']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="cannot"].index.tolist():
fixed_words = ["can","not"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="'"].index.tolist():
if df['original_word'].iloc[i+1] in ["m"]:
df.loc[i,'original_word'] = "'"+ df.loc[i+1,'original_word']
list_to_drop.append(i+1)
#ALICE IN WONDERLAND
if filename == "AliceInWonderland.tsv":
for i in df[df['original_word']=="‘--"].index.tolist():
fixed_words = ["‘","--"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="[‘"].index.tolist():
fixed_words = ["[","‘"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']==",)"].index.tolist():
fixed_words = [",",")"]
df.loc[i,'original_word'] = fixed_words
# FRANKENSTEIN
elif filename == "Frankenstein.tsv":
for i in df[df['original_word']=='""""'].index.tolist():
if df['original_word'].iloc[i+1] in ['What']:
df.loc[i,'original_word'] = '"'
else:
continue
for i in df[df['original_word']=='"!"""'].index.tolist():
fixed_words = ["!",'"']
df.loc[i,'original_word'] = fixed_words
#PRIDE AND PREJUDICE
elif filename == "PrideAndPrejudice.tsv":
for i in df[df['original_word']==',”'].index.tolist():
fixed_words = [',','”']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=='“_'].index.tolist():
fixed_words = ['“','_']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=='_.”'].index.tolist():
fixed_words = ['_','.','”']
df.loc[i,'original_word'] = fixed_words
#HUCKLEBERRY FINN
elif filename == "HuckleberryFinn.tsv":
for i in df[df['original_word']=="cannot"].index.tolist():
fixed_words = ["can","not"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="sumf'n"].index.tolist():
if df['original_word'].iloc[i+1] == '.':
fixed_words = ["sumf","'","n."]
df.loc[i,'original_word'] = fixed_words
list_to_drop.append(i+1)
else:
continue
for i in df[df['original_word']=="more'n"].index.tolist():
fixed_words = ["more","'n"]
df.loc[i,'original_word'] = fixed_words
#DRACULA
elif filename == "Dracula.tsv":
for i in df[df['original_word']=="_)"].index.tolist():
fixed_words = ["_",")"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="(_"].index.tolist():
fixed_words = ["(","_"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="_--"].index.tolist():
fixed_words = ["_","--"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']==":--"].index.tolist():
fixed_words = [":","--"]
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="P"].index.tolist():
if df['original_word'].iloc[i+1] == '.':
df.loc[i,'original_word'] = "P."
list_to_drop.append(i+1)
else:
continue
for i in df[df['original_word']=='""""'].index.tolist():
if df['original_word'].iloc[i+1] in ['paprika','mamaliga','impletata']:
df.loc[i,'original_word'] = '"'
else:
continue
for i in df[df['original_word']=='""""'].index.tolist():
df.loc[i,'original_word'] = '"'
for i in df[df['original_word']=='","""'].index.tolist():
if df['original_word'].iloc[i+1] in ['and','I']:
fixed_words = [",",'"']
df.loc[i,'original_word'] = fixed_words
else:
continue
#VANITY FAIR
elif filename == "VanityFair.tsv":
for i in df[df['original_word']=='""""'].index.tolist():
df.loc[i,'original_word'] = '"'
for i in df[df['original_word']=='","""'].index.tolist():
fixed_words = [",",'"']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=='"?"""'].index.tolist():
fixed_words = ["?",'"']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=='"!"""'].index.tolist():
fixed_words = ["!",'"']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="'"].index.tolist():
if df['original_word'].iloc[i+1] == "tis":
df.loc[i,'original_word'] = "'t"
df.loc[i+1,'original_word'] = "is"
#OLIVER TWIST
elif filename == "OliverTwist.tsv":
for i in df[df['original_word']=="'"].index.tolist():
if df['original_word'].iloc[i+1] in ["s","em","ll","S"]:
df.loc[i,'original_word'] = "'"+ df.loc[i+1,'original_word']
list_to_drop.append(i+1)
for i in df[df['original_word']=="TWIST'S"].index.tolist():
fixed_words = ["TWIST","'S"]
df.loc[i,'original_word'] = fixed_words
#THE CALL OF THE WILD
elif filename == "TheCallOfTheWild.tsv":
for i in df[df['original_word']==',”'].index.tolist():
fixed_words = [',','”']
df.loc[i,'original_word'] = fixed_words
for i in df[df['original_word']=="'"].index.tolist():
if df['original_word'].iloc[i+1] in ["'m"]:
list_to_drop.append(i)
for i in df[df['original_word'].str.endswith('--”')==True].index.tolist():
fixed_words = ['--','”']
df.loc[i,'original_word'] = fixed_words
df = df.drop(df.index[list_to_drop])
df = df.assign(original_word=df['original_word']).explode('original_word')
df = df.reset_index(drop=True)
if filename == "MobyDick.tsv":
df = df.drop(df.index[[381,382,539]])
df = df.reset_index(drop=True)
return df
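# A minimal, hypothetical usage sketch (the names below are illustrative, not from the original pipeline):
#   import pandas as pd
#   flair_df = pd.read_csv('flair_output/Dracula.tsv', sep='\t')  # must contain an 'original_word' column
#   fixed_df = patch_flair(flair_df, 'Dracula.tsv')               # tokens are re-split to match LitBank's style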
| 45.283422
| 121
| 0.516179
| 1,072
| 8,468
| 3.903918
| 0.117537
| 0.286738
| 0.073118
| 0.153883
| 0.783274
| 0.764158
| 0.735006
| 0.723059
| 0.706571
| 0.68172
| 0
| 0.005288
| 0.26299
| 8,468
| 187
| 122
| 45.283422
| 0.665278
| 0.055149
| 0
| 0.538462
| 0
| 0
| 0.221429
| 0.005351
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00641
| false
| 0.019231
| 0.00641
| 0
| 0.019231
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
29730123143e03cf26e04dff057633a99dad1767
| 6,458
|
py
|
Python
|
tests/cleaning.py
|
kais-siala/wurst
|
448dd4e9e0bfbde956c2913222222509ff2b14e1
|
[
"BSD-2-Clause"
] | null | null | null |
tests/cleaning.py
|
kais-siala/wurst
|
448dd4e9e0bfbde956c2913222222509ff2b14e1
|
[
"BSD-2-Clause"
] | null | null | null |
tests/cleaning.py
|
kais-siala/wurst
|
448dd4e9e0bfbde956c2913222222509ff2b14e1
|
[
"BSD-2-Clause"
] | null | null | null |
from wurst.transformations.cleaning import *
def test_empty_market_dataset():
given = {
"exchanges": [
{
"name": "market group for transport, freight train",
"product": "transport, freight train",
"type": "technosphere",
},
{
"location": "GLO",
"name": "market for diazine-compound",
"product": "diazine-compound",
"type": "production",
},
{
"location": "RER",
"name": "diazine-compound production",
"product": "diazine-compound",
"type": "technosphere",
},
{
"location": "RoW",
"name": "diazine-compound production",
"product": "diazine-compound",
"type": "technosphere",
},
{
"location": "RoW",
"name": "diazine-compound production",
"product": "diazine-compound",
"type": "substitution",
},
{
"location": "RoW",
"name": "still there",
"product": "diazine-compound",
"type": "technosphere",
},
],
"name": "market for diazine-compound",
"reference product": "diazine-compound",
}
first = {
"exchanges": [
{
"name": "market group for transport, freight train",
"product": "transport, freight train",
"type": "technosphere",
},
{
"location": "GLO",
"name": "market for diazine-compound",
"product": "diazine-compound",
"type": "production",
},
{
"location": "RoW",
"name": "diazine-compound production",
"product": "diazine-compound",
"type": "substitution",
},
{
"location": "RoW",
"name": "still there",
"product": "diazine-compound",
"type": "technosphere",
},
],
"name": "market for diazine-compound",
"reference product": "diazine-compound",
}
second = {
"exchanges": [
{
"name": "market group for transport, freight train",
"product": "transport, freight train",
"type": "technosphere",
},
{
"location": "GLO",
"name": "market for diazine-compound",
"product": "diazine-compound",
"type": "production",
},
{
"location": "RoW",
"name": "diazine-compound production",
"product": "diazine-compound",
"type": "substitution",
},
],
"name": "market for diazine-compound",
"reference product": "diazine-compound",
}
assert empty_market_dataset(given, exclude=["still there"]) == first
assert empty_market_dataset(given) == second
def test_delete_zero_amount_exchanges():
given = [
{
"exchanges": [
{"type": "foo", "amount": 0},
{"type": "bar", "amount": 1},
{"type": "baz", "amount": 0},
]
}
]
first = [
{
"exchanges": [
{"type": "foo", "amount": 0},
{"type": "bar", "amount": 1},
]
}
]
second = [
{
"exchanges": [
{"type": "bar", "amount": 1},
]
}
]
assert delete_zero_amount_exchanges(given, drop_types=["baz"]) == first
assert delete_zero_amount_exchanges(given) == second
def test_remove_exchange_fields_with_nones():
given = [
{
"exchanges": [
{
"name": "market group for transport, freight train",
"product": None,
"type": "",
}
],
"name": "market for diazine-compound",
"reference product": "diazine-compound",
}
]
expected = [
{
"exchanges": [
{"name": "market group for transport, freight train", "type": ""}
],
"name": "market for diazine-compound",
"reference product": "diazine-compound",
}
]
assert remove_exchange_fields_with_nones(given) == expected
def test_add_metadata_to_production_exchanges():
given = [
{
"exchanges": [
{"name": "D", "unit": "km", "type": "technosphere"},
{"name": "Y", "unit": "km", "type": "technosphere"},
{"name": "A", "type": "production"},
{
"name": "Z",
"location": "somewhere",
"unit": "ton",
"type": "production",
},
],
"name": "A",
"reference product": "B",
"location": "C",
"unit": "kg",
},
{
"exchanges": [],
"name": "D",
"reference product": "E",
"location": "F",
"unit": "km",
},
]
expected = [
{
"exchanges": [
{"name": "D", "unit": "km", "location": "F", "type": "technosphere"},
{"name": "Y", "unit": "km", "type": "technosphere"},
{"name": "A", "location": "C", "type": "production"},
{
"name": "Z",
"location": "somewhere",
"unit": "ton",
"type": "production",
},
],
"name": "A",
"reference product": "B",
"location": "C",
"unit": "kg",
},
{
"exchanges": [],
"name": "D",
"reference product": "E",
"location": "F",
"unit": "km",
},
]
assert (
add_metadata_to_production_exchanges(
given, fields=("location",), matching_fields=("name", "unit")
)
== expected
)
| 29.898148
| 85
| 0.395324
| 431
| 6,458
| 5.837587
| 0.162413
| 0.166932
| 0.131161
| 0.103339
| 0.883545
| 0.835056
| 0.751987
| 0.751987
| 0.732909
| 0.703498
| 0
| 0.001693
| 0.451068
| 6,458
| 215
| 86
| 30.037209
| 0.708039
| 0
| 0
| 0.550725
| 0
| 0
| 0.323784
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 1
| 0.019324
| false
| 0
| 0.004831
| 0
| 0.024155
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
46203899171084a1244da6d656f45456dc10c239
| 12,788
|
py
|
Python
|
parser/fase2/team04/Tytus/Optimizacion/optimizaciones.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 35
|
2020-12-07T03:11:43.000Z
|
2021-04-15T17:38:16.000Z
|
parser/fase2/team04/Tytus/Optimizacion/optimizaciones.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 47
|
2020-12-09T01:29:09.000Z
|
2021-01-13T05:37:50.000Z
|
parser/fase2/team04/Tytus/Optimizacion/optimizaciones.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 556
|
2020-12-07T03:13:31.000Z
|
2021-06-17T17:41:10.000Z
|
from Optimizacion._def import Def
from Optimizacion.asignacion import Asignacion
from Optimizacion.operacion import Operacion
from Optimizacion.literal import Literal
def optimizacion_uno(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
if len(def_instructions) >= 2:
index = 0
while index < len(def_instructions) - 1:
first_inst = def_instructions[index]
second_inst = def_instructions[index + 1]
if isinstance(first_inst, Asignacion) and isinstance(second_inst, Asignacion):
if first_inst.izquierda.toString() == second_inst.derecha.toString() and first_inst.derecha.toString() == second_inst.izquierda.toString():
dic = {
'type': 'Tipo 1',
'before': f"{first_inst.toString()}<BR>{second_inst.toString()}",
'opt': f"{first_inst.toString()}",
'line': f"{first_inst.linea}",
}
arbol.addOpt(dic)
def_instructions.remove(second_inst)
continue
index += 1
def optimizacion_ocho(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() == operacion.izquierda.toString() and operacion.operador == '+' and operacion.derecha and operacion.derecha.toString() == '0':
dic = {
'type': 'Tipo 8',
'before': f"{def_inst.toString()}",
'opt': f"#Se elimimo la instruccion",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
def_instructions.remove(def_inst)
def optimizacion_nueve(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() == operacion.izquierda.toString() and operacion.operador == '-' and operacion.derecha and operacion.derecha.toString() == '0':
dic = {
'type': 'Tipo 9',
'before': f"{def_inst.toString()}",
'opt': f"#Se elimimo la instruccion",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
def_instructions.remove(def_inst)
def optimizacion_diez(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() == operacion.izquierda.toString() and operacion.operador == '*' and operacion.derecha and operacion.derecha.toString() == '1':
dic = {
'type': 'Tipo 10',
'before': f"{def_inst.toString()}",
'opt': f"#Se elimimo la instruccion",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
def_instructions.remove(def_inst)
def optimizacion_once(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() == operacion.izquierda.toString() and operacion.operador == '/' and operacion.derecha and operacion.derecha.toString() == '1':
dic = {
'type': 'Tipo 11',
'before': f"{def_inst.toString()}",
'opt': f"#Se elimimo la instruccion",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
def_instructions.remove(def_inst)
def optimizacion_doce(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '+' and operacion.derecha and operacion.derecha.toString() == '0':
dic = {
'type': 'Tipo 12',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(operacion.izquierda.toString(), operacion.linea)
def_inst.derecha = literal
def optimizacion_trece(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '-' and operacion.derecha and operacion.derecha.toString() == '0':
dic = {
'type': 'Tipo 13',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(operacion.izquierda.toString(), operacion.linea)
def_inst.derecha = literal
def optimizacion_catorce(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '*' and operacion.derecha and operacion.derecha.toString() == '1':
dic = {
'type': 'Tipo 14',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(operacion.izquierda.toString(), operacion.linea)
def_inst.derecha = literal
def optimizacion_quince(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '/' and operacion.derecha and operacion.derecha.toString() == '1':
dic = {
'type': 'Tipo 15',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(operacion.izquierda.toString(), operacion.linea)
def_inst.derecha = literal
def optimizacion_dieciseis(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '*' and operacion.derecha and operacion.derecha.toString() == '2':
dic = {
'type': 'Tipo 16',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()} + {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(operacion.izquierda.toString(), operacion.linea)
nueva_operacion = Operacion(literal, '+', literal, operacion.linea)
index = def_instructions.index(def_inst)
def_instructions[index] = nueva_operacion
def optimizacion_diecisiete(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.izquierda.toString() and operacion.operador == '*' and operacion.derecha and operacion.derecha.toString() == '0':
dic = {
'type': 'Tipo 17',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.derecha.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(0, operacion.linea)
def_inst.derecha = literal
def optimizacion_dieciocho(instrucciones, arbol):
for inst in instrucciones:
if isinstance(inst, Def):
def_instructions = inst.instrucciones
for def_inst in def_instructions:
if isinstance(def_inst, Asignacion) and isinstance(def_inst.derecha, Operacion):
operacion = def_inst.derecha
if def_inst.izquierda.toString() != operacion.derecha.toString() and operacion.operador == '/' and operacion.derecha and operacion.izquierda.toString() == '0':
dic = {
'type': 'Tipo 18',
'before': f"{def_inst.toString()}",
'opt': f"{def_inst.izquierda.toString()} = {operacion.izquierda.toString()}",
'line': f"{def_inst.linea}",
}
arbol.addOpt(dic)
literal = Literal(0, operacion.linea)
def_inst.derecha = literal
| 57.603604
| 179
| 0.514076
| 1,134
| 12,788
| 5.65873
| 0.059965
| 0.103631
| 0.036154
| 0.067321
| 0.872682
| 0.869565
| 0.869565
| 0.867384
| 0.858189
| 0.848995
| 0
| 0.004978
| 0.387394
| 12,788
| 222
| 180
| 57.603604
| 0.814143
| 0
| 0
| 0.671429
| 0
| 0.004762
| 0.109938
| 0.060677
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.019048
| 0
| 0.07619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
462c0102eaba75e6cfff0bab1ee3f7ddafd36233
| 200
|
py
|
Python
|
pandocinject/__init__.py
|
msprev/pandocinject
|
fa8ea62df5dcd9c049fc23c787ca6168f5269c3c
|
[
"BSD-3-Clause"
] | 4
|
2016-03-23T21:18:48.000Z
|
2020-03-15T15:23:32.000Z
|
pandocinject/__init__.py
|
msprev/pandocinject
|
fa8ea62df5dcd9c049fc23c787ca6168f5269c3c
|
[
"BSD-3-Clause"
] | null | null | null |
pandocinject/__init__.py
|
msprev/pandocinject
|
fa8ea62df5dcd9c049fc23c787ca6168f5269c3c
|
[
"BSD-3-Clause"
] | 1
|
2018-10-12T08:13:06.000Z
|
2018-10-12T08:13:06.000Z
|
# make these classes available as top level imports from package
from pandocinject.pandocinject import Injector
from pandocinject.formatter import Formatter
from pandocinject.selector import Selector
| 40
| 64
| 0.865
| 25
| 200
| 6.92
| 0.6
| 0.277457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115
| 200
| 4
| 65
| 50
| 0.977401
| 0.31
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
463d8abcf71efecb4d6482bc2dec252825b94518
| 190
|
py
|
Python
|
Python/code case/code_case_59.py
|
amazing-2020/pdf
|
8cd3f5f510a1c1ed89b51b1354f4f8c000c5b24d
|
[
"Apache-2.0"
] | 3
|
2021-01-01T13:08:24.000Z
|
2021-02-03T09:27:56.000Z
|
Python/code case/code_case_59.py
|
amazing-2020/pdf
|
8cd3f5f510a1c1ed89b51b1354f4f8c000c5b24d
|
[
"Apache-2.0"
] | null | null | null |
Python/code case/code_case_59.py
|
amazing-2020/pdf
|
8cd3f5f510a1c1ed89b51b1354f4f8c000c5b24d
|
[
"Apache-2.0"
] | null | null | null |
import code_case_58, sys
code_case_58.print_func('python')
print(code_case_58.fib(1000))
print(code_case_58.fib2(100))
print(code_case_58.__name__)
code_case_58.function()
print(dir(sys))
| 19
| 33
| 0.805263
| 35
| 190
| 3.885714
| 0.428571
| 0.352941
| 0.441176
| 0.330882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111732
| 0.057895
| 190
| 9
| 34
| 21.111111
| 0.648045
| 0
| 0
| 0
| 0
| 0
| 0.031579
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.142857
| 0
| 0.142857
| 0.714286
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
463f574c01744f288bbe1a1edc65aa8b9d5e6d8a
| 5,326
|
py
|
Python
|
test/functional/genesis_upgrade_tests/max_ops_per_scrpt.py
|
JimmyNLose/Sant_rOyOS_SV
|
829036a6770fee27bad506911b245e9161795568
|
[
"OML"
] | null | null | null |
test/functional/genesis_upgrade_tests/max_ops_per_scrpt.py
|
JimmyNLose/Sant_rOyOS_SV
|
829036a6770fee27bad506911b245e9161795568
|
[
"OML"
] | null | null | null |
test/functional/genesis_upgrade_tests/max_ops_per_scrpt.py
|
JimmyNLose/Sant_rOyOS_SV
|
829036a6770fee27bad506911b245e9161795568
|
[
"OML"
] | 1
|
2020-06-13T18:41:58.000Z
|
2020-06-13T18:41:58.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2019 Bitcoin Association
# Copyright (c) 2020* Jimmy N. Lose
# * Gregorian calendar years
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
from genesis_upgrade_tests.test_base import GenesisHeightBasedSimpleTestsCase
from test_framework.height_based_test_framework import SimpleTestDefinition
from test_framework.script import CScript, OP_TRUE, OP_NOP, OP_DUP, OP_DROP
from test_framework.cdefs import MAX_OPS_PER_SCRIPT_BEFORE_GENESIS
class MaxOpsPerScriptTestWithPolicy(GenesisHeightBasedSimpleTestsCase):
ARGS = GenesisHeightBasedSimpleTestsCase.ARGS + ['-banscore=1000000', '-whitelist=127.0.0.1', '-maxopsperscriptpolicy=1000']
NAME = "Max operations per script limit with maxopsperscriptpolicy set to 1000"
TESTS = [
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"PRE-GENESIS", b""
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"PRE-GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("MEMPOOL AT GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"MEMPOOL AT GENESIS", b"",
),
SimpleTestDefinition("MEMPOOL AT GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"MEMPOOL AT GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"GENESIS", b""
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("GENESIS", CScript([OP_TRUE] + [OP_NOP] * 1000),
"GENESIS", b""
),
SimpleTestDefinition("GENESIS", CScript([OP_TRUE] + [OP_NOP] * 1001),
"GENESIS", b"",
p2p_reject_reason=b'non-mandatory-script-verify-flag (Operation limit exceeded)'
),
]
class MaxOpsPerScriptTest(GenesisHeightBasedSimpleTestsCase):
ARGS = GenesisHeightBasedSimpleTestsCase.ARGS + ['-banscore=1000000', '-whitelist=127.0.0.1']
NAME = "Max operations per script limit, maxopsperscriptpolicy not defined"
TESTS = [
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"PRE-GENESIS", b""
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"PRE-GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("MEMPOOL AT GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"MEMPOOL AT GENESIS", b"",
),
SimpleTestDefinition("MEMPOOL AT GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"MEMPOOL AT GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * MAX_OPS_PER_SCRIPT_BEFORE_GENESIS),
"GENESIS", b""
),
SimpleTestDefinition("PRE-GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"GENESIS", b"",
p2p_reject_reason=b'genesis-script-verify-flag-failed (Operation limit exceeded)',
block_reject_reason=b'blk-bad-inputs'
),
SimpleTestDefinition("GENESIS", CScript([OP_TRUE] + [OP_NOP] * (MAX_OPS_PER_SCRIPT_BEFORE_GENESIS + 1)),
"GENESIS", b""
),
]
| 64.168675
| 128
| 0.546752
| 498
| 5,326
| 5.568273
| 0.178715
| 0.051929
| 0.075009
| 0.086549
| 0.789758
| 0.783267
| 0.742157
| 0.725929
| 0.725929
| 0.725929
| 0
| 0.018912
| 0.354675
| 5,326
| 82
| 129
| 64.95122
| 0.787896
| 0.03849
| 0
| 0.777778
| 0
| 0
| 0.212275
| 0.058444
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4650054338d71bba8b7d36f167d55011baa4917a
| 922
|
py
|
Python
|
script/stow_process/__init__.py
|
amazon-picking-challenge/team_pfn
|
2f76524b067d816d8407f6c4fae4e6d33939c024
|
[
"Apache-2.0"
] | 7
|
2016-09-04T02:07:04.000Z
|
2017-05-25T02:31:07.000Z
|
script/stow_process/__init__.py
|
amazon-picking-challenge/team_pfn
|
2f76524b067d816d8407f6c4fae4e6d33939c024
|
[
"Apache-2.0"
] | null | null | null |
script/stow_process/__init__.py
|
amazon-picking-challenge/team_pfn
|
2f76524b067d816d8407f6c4fae4e6d33939c024
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2016 Preferred Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from visible_greedy import VisibleGreedyStowing
def select_strategy(pos_info):
# TODO: select the best strategy. This should probably just
# pick the best available implementation; there is no
# case in which picking a "stupider" implementation makes sense.
return VisibleGreedyStowing(pos_info)
| 36.88
| 74
| 0.765727
| 134
| 922
| 5.238806
| 0.708955
| 0.08547
| 0.037037
| 0.045584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010471
| 0.171367
| 922
| 24
| 75
| 38.416667
| 0.908377
| 0.814534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
469560b304c90f3048c2ba553ad9bf80c905f9e1
| 79,009
|
py
|
Python
|
ysynth4.py
|
YoutechA320U/ysynth4
|
5f3f2b094d69b11e1f916138bbb4acc1f733790b
|
[
"MIT"
] | 1
|
2020-03-21T12:02:00.000Z
|
2020-03-21T12:02:00.000Z
|
ysynth4.py
|
YoutechA320U/ysynth4
|
5f3f2b094d69b11e1f916138bbb4acc1f733790b
|
[
"MIT"
] | 2
|
2019-08-01T15:24:07.000Z
|
2019-08-24T12:03:41.000Z
|
ysynth4.py
|
YoutechA320U/ysynth4
|
5f3f2b094d69b11e1f916138bbb4acc1f733790b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#Original source code
# https://github.com/YoutechA320U/ysynth4
#License
# [MIT] https://github.com/YoutechA320U/ysynth4/blob/master/LICENSE
##--Release notes--##
#v1.75[2019/09/07]
#WiFi can now be configured standalone using the on-screen keyboard.
#This makes online updates possible without preparing a wired LAN or connecting a keyboard and display for setup.
#v1.8[2019/09/09]
#The access point configured over WiFi is now connected to preferentially.
#Commands have also been reviewed, making operation more stable.
#v1.9 [2019/09/13]
#Display drawing is now multithreaded, so external MIDI input and button MIDI input are reflected fully simultaneously, and continuously even in the background.
#v1.91 [2019/09/14]
#If the display ever freezes, holding the MODE key and pressing all of the other keys now resets the display.
#v1.92 [2019/09/16]
#Fixed some missing variables.
#v1.94 [2020/1/27]
#Supports Raspbian Buster Lite.
#Timidity++ version 2.15.0 is now built from source during setup.
#When updating Ysynth4, setup.sh is re-run so that system and library updates are performed as well.
#The display behavior during updates has been changed accordingly.
#v1.95 [2020/1/28]
#Fixed a bug where the pitch bend output was incorrect.
#v1.96 [2020/4/11]
#Added support for OTG USB-MIDI on the Raspberry Pi 4.
#v1.97 [2020/4/21]
#Fixed incorrect handling of sound card switching.
##--##--##--##--##
import RPi.GPIO as GPIO
import time
import subprocess
import rtmidi
import sys
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import ST7735
import threading
width = 128
height = 160
disp = ST7735.ST7735(
port=0,
cs=0,
dc=12,
rst=25,
rotation=90,
width=128,
height=160,
spi_speed_hz=31200000+15600000
)
# Initialize display.
disp.begin()
width = disp.width
height = disp.height
img = Image.new('RGB',(width,height),color=(0,0,0))
draw = ImageDraw.Draw(img)
draw.rectangle((0,0,160,160),(0,0,0))
#*#*#*#*#*#*#
version= 1.97
day="2020/04/21"
#*#*#*#*#*#*#*
volume = 70
mode = 0
midCH = 0
midPRG= [0]*16
midCC7= [100]*16
midCC11= [127]*16
midCC10= [64]*16
midCC1= [0]*16
midCC91= [40]*16
midCC93= [0]*16
midCC94= [0]*16
pb1 = [0x00]*16
pb2 = [0x40]*16
playflag = [0]
sf2used = [0]
pbcounter =[0]*16
midicounter = 0
sf2counter = 0
wificounter = 0
dialog_open=0
longpush=0
input_OK = 16
input_MODE = 17
input_LEFT = 6
input_RIGHT = 24
input_UP = 23
input_DOWN = 5
mode0_write= False
waitflag=0
GPIO.setmode(GPIO.BCM)
GPIO.setup(input_OK,GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(input_MODE,GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(input_RIGHT,GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(input_LEFT,GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(input_UP,GPIO.IN,pull_up_down=GPIO.PUD_UP)
GPIO.setup(input_DOWN,GPIO.IN,pull_up_down=GPIO.PUD_UP)
fontss = ImageFont.truetype('/usr/share/fonts/truetype/takao-gothic/TakaoGothic.ttf',12,encoding='unic')
fonts = ImageFont.truetype('/usr/share/fonts/truetype/takao-gothic/TakaoGothic.ttf',13,encoding='unic')
fontm = ImageFont.truetype('/usr/share/fonts/truetype/takao-gothic/TakaoGothic.ttf',14,encoding='unic')
fontl = ImageFont.truetype('/usr/share/fonts/truetype/takao-gothic/TakaoGothic.ttf',20,encoding='unic')
fontll = ImageFont.truetype('/usr/share/fonts/truetype/takao-gothic/TakaoGothic.ttf',24,encoding='unic')
subprocess.call('sudo mount -t vfat -o ,iocharset=utf8 /dev/sda1 /media/usb0' ,shell=True)
mountcheck=subprocess.check_output("mount|grep -m1 /dev/sda|awk '{print $3}'" ,shell=True).decode('utf-8').strip()
def boot_disp():
global mountcheck
for x in range(69):
draw.rectangle((0,0,160,128),(0,0,0))
if mountcheck == str("/media/usb0"):
draw.text((35,1+x-32),"Ysynth4",font=fontll,fill=(55,255,255))
#draw.text((35,1+x-32)," ®",font=fontl,fill=(55,255,255))
if mountcheck != str("/media/usb0"):
draw.rectangle((40,1+x-32,120,1+x-10),outline=(100,100,100),fill=(55,255,255))
#draw.text((35,1+x-32)," ®",font=fontl,fill=(55,255,255))
draw.text((35,100),"v{0}/{1}" .format(version,day),font=fontss,fill=(55,255,255))
draw.text((40,110),"@YoutechA320U",font=fontss,fill=(55,255,255))
time.sleep(0.01)
disp.display(img)
boot_disp()
if mountcheck == str("/media/usb0"):
subprocess.call('sudo mkdir /media/usb0/midi' ,shell=True)
subprocess.call('sudo mkdir /media/usb0/sf2' ,shell=True)
subprocess.call('sudo mkdir /media/usb0/timidity_cfg' ,shell=True)
fluidcheck=subprocess.check_output("find /media/usb0/sf2/ -name FluidR3_GM.sf2" ,shell=True).decode('utf-8').strip()
if fluidcheck != str("/media/usb0/sf2/FluidR3_GM.sf2"):
subprocess.call('sudo cp /usr/share/sounds/sf2/FluidR3_GM.sf2 media/usb0/sf2/' ,shell=True)
subprocess.call('rename.ul .MID .mid /media/usb0/midi/*' ,shell=True)
subprocess.call('rename.ul .SF2 .sf2 /media/usb0/sf2/*' ,shell=True)
subprocess.call('rename.ul .CFG .cfg /media/usb0/timidity_cfg/*' ,shell=True)
midi = subprocess.check_output('find /media/usb0/midi/ -name \*.mid|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/midi/','').replace('.mid','').split('\n')
playflag = [0]*len(midi)
if midi[0]=='':
midi= ["midi_None"]
midicounter=0
sf2 = subprocess.check_output('find /media/usb0/sf2/ -name \*.sf2|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/sf2/','').replace('.sf2','').split('\n')
sf2used = [0]*len(sf2)
if sf2[0]=='':
sf2 = ["sf2_None"]
sf2counter = 0
cfg = subprocess.check_output('find /media/usb0/timidity_cfg/ -name \*.cfg|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/timidity_cfg/','').replace('.cfg','').split('\n')
if (sf2 != cfg) and (sf2[0] != "sf2_None"):
list_difference = list(set(cfg) - set(sf2))
for x in range(len(list_difference)):
subprocess.call('sudo rm "/media/usb0/timidity_cfg/{}.cfg"' .format(list_difference[x]) ,shell=True)
list_difference = list(set(sf2) - set(cfg))
for x in range(len(list_difference)):
subprocess.call('''sudo /home/pi/ysynth4/cfgforsf -C "/media/usb0/sf2/{sf2name}.sf2" | sed -e 's/(null)//' -e 's/^[ ]*//g' -e '/(null)#/d' -e /^#/d | grep -C 1 % | sed -e '/--/d' -e /^$/d > "/media/usb0/timidity_cfg/{sf2name}.cfg"''' .format(sf2name=list_difference[x]) ,shell=True)
if sf2[0] == "sf2_None":
subprocess.call('sudo rm /media/usb0/sf2/*.cfg' ,shell=True)
draw.rectangle((0,0,160,128),(0,0,0))
time.sleep(2)
disp.display(img)
subprocess.call('sudo killall ttymidi',shell=True)
subprocess.call('sudo killall timidity',shell=True)
subprocess.Popen('sudo /home/pi/ysynth4/ttymidi -s /dev/ttyAMA0 -b 38400',shell=True)
time.sleep(1)
midiout = rtmidi.MidiOut()
midiout.open_virtual_port("Ysynth4_out") # 仮想MIDI出力ポートの名前
midiin = rtmidi.MidiIn()
midiin.open_virtual_port("Ysynth4_in") # 仮想MIDI入力ポートの名前
midiin.ignore_types(sysex=False)
def allnoteoff():
a = 0xb0
while (a < 0xbf ):
midiout.send_message([a,0x78,0x00])
a += 1
subprocess.call('amixer cset numid=1 {}% > /dev/null'.format(volume) ,shell=True)
wifi_connect=subprocess.check_output('''wpa_cli -i wlan0 status|grep -v bssid |grep ssid |sed -e 's/ssid=//g' ''' ,shell=True).decode('utf-8').strip()
if wifi_connect=="":
wifi_connect="接続できません"
#wifi_connect="**********"
audio_card = str(subprocess.check_output("aplay -l |grep -m1 'card 1'|awk '{print $4;}' " ,shell=True).decode('utf-8').strip().replace(']','').replace('[','').replace(',',''))
mountcheck=subprocess.check_output("mount|grep -m1 /dev/sda|awk '{print $3}'" ,shell=True).decode('utf-8').strip()
subprocess.call('sh /home/pi/ysynth4/midiconnect.sh',shell=True)
x = 3
y = 0
m_size="A" #1文字分
cur_size="▶"
tsx,tsy = draw.textsize(m_size,fontss)
tmx,tmy = draw.textsize(m_size,fontm)
tlx,tly = draw.textsize(m_size,fontl)
csx,csy = draw.textsize(m_size,fontss)
#txt_size_s_x,txt_size_s_y = tsx,tsy
#txt_size_m_x,txt_size_m_y = tmx,tmy
#txt_size_l_x,txt_size_l_y = tlx,tly
#cur_size_x,cur_size_y = csx,cxy
mode0_coordi=0
mode0_coordi_xl=[3,3,3,3,3,3,3,3,tmx*10,tmx*10]
mode0_coordi_yl=[tly/4,tly+tmy+1,\
tly+tmy*2+1,tly+tmy*3+1,\
tly+tmy*4+1,tly+tmy*5+1,\
tly+tmy*6+1,tly+tmy*7+1,\
tly+tmy+1,tly+tmy*2+1]
mode1_coordi=0
mode1_coordi_xl=[3,3]
mode1_coordi_yl=[tly+tmy+8,tly+tmy*3+8]
mode2_coordi=0
mode2_coordi_xl=[3,3,3,3,3,3,3]
mode2_coordi_yl=[tly+tmy+1,tly+tmy*2+1,\
tly+tmy*3+1,tly+tmy*4+1,\
tly+tmy*5+1,tly+tmy*6+1,tly+tmy*7+1]
dlog_zahyo=1
#dialog_coordi=dlog_zahyo
dlog_zahyo_xl=[12,82]
dlog_zahyo_yl=[90,90]
sk_zahyo=0
msg = None
def mididisp(): # process that reflects MIDI input on the display
global midPRG,midCC7,midCC11,midCC10,midCC10,midCC1,midCC91,midCC93,midCC94,pb1,pb2,mode0_write
while True:
time.sleep(0.0001)
msg = midiin.get_message()
if msg is None:
message,deltatime = None,None
if msg is not None:
message,deltatime = msg
try:
if message == ([240,65,16,66,18,64,0,127,0,65,247]) or message ==( [240,67,16,76,0,0,126,0,247]) or message == ([240,126,127,9,1,247]) or message == ([240,126,127,9,3,247]) :
midPRG= [0]*16
midCC7= [100]*16
midCC11= [127]*16
midCC10= [64]*16
midCC1= [0]*16
midCC91= [40]*16
midCC93= [0]*16
midCC94= [0]*16
pb1 = [0]*16
pb2 = [0x40]*16
if mode == 0:
draw.rectangle((tmx*5,tly+tmy+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
draw.rectangle((tmx*18,tly+tmy+1,160,128),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
except :
pass
for forlch in range(16):
if message[0] == 192+forlch :
if midPRG[forlch] != message[1]:
midPRG[forlch] = message[1]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy+1,65,tly+tmy*2),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==7:
if midCC7[forlch] != message[2]:
midCC7[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*2+1,65,tly+tmy*3),(0,0,0))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==11:
if midCC11[forlch] != message[2]:
midCC11[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*3+1,65,tly+tmy*4),(0,0,0))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==10:
if midCC10[forlch] != message[2]:
midCC10[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*4+1,65,tly+tmy*5),(0,0,0))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==1:
if midCC1[forlch] != message[2]:
midCC1[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*5+1,65,tly+tmy*6),(0,0,0))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==91:
if midCC91[forlch] != message[2]:
midCC91[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*6+1,65,tly+tmy*7),(0,0,0))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==93:
if midCC93[forlch] != message[2]:
midCC93[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*5,tly+tmy*7+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 176+forlch and message[1] ==94:
if midCC94[forlch] != message[2]:
midCC94[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*18,tly+tmy+1,160,tly+tmy*2),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
if message[0] == 0xe0+forlch :
if pb1[forlch] != message[1] or pb2[forlch] != message[2]:
pb1[forlch] = message[1]
pb2[forlch] = message[2]
if mode == 0 and forlch==midCH:
draw.rectangle((tmx*18,tly+tmy*2+1,160,tly+tmy*3),(0,0,0))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[forlch]+pb1[forlch]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
##End of the handling that mirrors incoming MIDI input onto the display
def waiting(): #Waiting-state handling (animated "please wait" messages)
global waitflag
while True:
time.sleep(0.0001)
if waitflag==1:
draw.rectangle((tmx*6,tly+tmy+1,160,tly+tmy+14),outline=(0,0,0),fill=(0,0,0))
draw.text((tmx*6,tly+tmy+1),"お待ちください",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==1:
draw.rectangle((tmx*6,tly+tmy+1,160,tly+tmy+14),outline=(0,0,0),fill=(0,0,0))
draw.text((tmx*6,tly+tmy+1),"お待ちください.",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==1:
draw.rectangle((tmx*6,tly+tmy+1,160,tly+tmy+14),outline=(0,0,0),fill=(0,0,0))
draw.text((tmx*6,tly+tmy+1),"お待ちください..",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==1:
draw.rectangle((tmx*6,tly+tmy+1,160,tly+tmy+14),outline=(0,0,0),fill=(0,0,0))
draw.text((tmx*6,tly+tmy+1),"お待ちください...",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==2:
draw.rectangle((9,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.text((9,tly+tmy+1),"SSID:検索中",font=fontm,fill=(55,255,255))
disp.display(img)
time.sleep(0.5)
if waitflag==2:
draw.rectangle((9,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.text((9,tly+tmy+1),"SSID:検索中.",font=fontm,fill=(55,255,255))
disp.display(img)
time.sleep(0.5)
if waitflag==2:
draw.rectangle((9,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.text((9,tly+tmy+1),"SSID:検索中..",font=fontm,fill=(55,255,255))
disp.display(img)
time.sleep(0.5)
if waitflag==2:
draw.rectangle((9,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.text((9,tly+tmy+1),"SSID:検索中...",font=fontm,fill=(55,255,255))
disp.display(img)
time.sleep(0.5)
if waitflag==3:
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy*2+1),"お待ちください",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==3:
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy*2+1),"お待ちください.",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==3:
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy*2+1),"お待ちください..",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==3:
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy*2+1),"お待ちください...",font=fontm,fill=(255,255,55))
disp.display(img)
time.sleep(0.5)
if waitflag==4:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," アップデートしています",font=fontss,fill=(0,255,0))
disp.display(img)
time.sleep(0.5)
if waitflag==4:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," アップデートしています.",font=fontss,fill=(0,255,0))
disp.display(img)
time.sleep(0.5)
if waitflag==4:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," アップデートしています..",font=fontss,fill=(0,255,0))
disp.display(img)
time.sleep(0.5)
if waitflag==4:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," アップデートしています...",font=fontss,fill=(0,255,0))
disp.display(img)
time.sleep(0.5)
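#The waiting() animation runs on its own thread; waitflag selects which "please wait"/"searching"/"updating" message cycles on the display while blocking work runs elsewhere.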
thread1 = threading.Thread(target=waiting)
thread1.start()
def mode0_default_disp(): #Default screen for mode 0 (MIDI controller)
global mode0_write
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((mode0_coordi_xl[mode0_coordi],mode0_coordi_yl[mode0_coordi]),cur_size,font=fontss,fill=(255,255,255))
draw.text((9,0),"CH:",font=fontl,fill=(55,255,255))
draw.text((9,tly+tmy+1),"PC :",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),"VOL:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*3+1),"EXP:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*4+1),"PAN:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*5+1),"MOD:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*6+1),"REV:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*7+1),"CHO:",font=fontm,fill=(55,255,255))
draw.text((tlx*4,0),str("{0:02}".format(midCH + 1)),font=fontl,fill=(255,255,55))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
draw.text((csx+tmx*10,tly+tmy+1),"DLY :",font=fontm,fill=(55,255,255))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
draw.text((csx+tmx*10,tly+tmy*2+1),"P.BEND:",font=fontm,fill=(55,255,255))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
draw.text((tlx*8,0),"SysVol: "+str(volume),font=fontss,fill=(0,255,0))
mode0_write= True
def mode1_default_disp(): #Default screen for mode 1 (sequencer)
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((mode1_coordi_xl[mode1_coordi],mode1_coordi_yl[mode1_coordi]),cur_size,font=fontss,fill=(255,255,255))
draw.text((9,0),"SMF",font=fontl,fill=(255,255,55))
draw.text((9,tly+tmy+1),"SF2:{0:03d}/{1:03d}" .format(sf2counter + 1 ,len(sf2)),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*3+1),"SMF:{0:03d}/{1:03d}".format(midicounter + 1 ,len(midi) ),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),sf2[sf2counter],font=fontm,fill=(255,255,55))
if sf2used[sf2counter]==1:
draw.text((9,tly+tmy+1)," ♪",font=fontm,fill=(55,255,255))
if playflag[midicounter]==1:
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*4+1),midi[midicounter],font=fontm,fill=(255,255,55))
draw.text((tlx*8,0),"SysVol: "+str(volume),font=fontss,fill=(0,255,0))
disp.display(img)
def mode2_default_disp(): #Default screen for mode 2 (settings)
global wifi_connect
wifi_connect=subprocess.check_output('''wpa_cli -i wlan0 status|grep -v bssid |grep ssid |sed -e 's/ssid=//g' ''' ,shell=True).decode('utf-8').strip()
if wifi_connect=="" and wifi_connect!="お待ちください...":
wifi_connect="接続できません"
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((mode2_coordi_xl[mode2_coordi],mode2_coordi_yl[mode2_coordi]),cur_size,font=fontss,fill=(255,255,255))
draw.text((9,0),"設定",font=fontl,fill=(255,255,55))
draw.text((9,tly+tmy+1),"WiFi:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),"Audio:",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*3+1),"USBメモリ",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*4+1),"Ysynth4アップデート",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*5+1),"再起動",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*6+1),"シャットダウン",font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*7+1),"リロード",font=fontm,fill=(55,255,255))
draw.text((tmx*6,tly+tmy+1),wifi_connect,font=fontm,fill=(255,255,55))
draw.text((tmx*7,tly+tmy*2+1),audio_card,font=fontm,fill=(255,255,55))
draw.text((tlx*8,0),"SysVol: "+str(volume),font=fontss,fill=(0,255,0))
disp.display(img)
def mode3_default_disp(): #Default screen for mode 3 (Wi-Fi selection)
global wifi,wificounter,waitflag
wificounter=0
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((9,0),"WiFi",font=fontl,fill=(255,255,55))
draw.text((tlx*8,0),"SysVol: "+str(volume),font=fontss,fill=(0,255,0))
waitflag=2
wifi=subprocess.check_output('''iwlist wlan0 scan| grep ESSID |sed -e 's/ESSID://g' -e 's/[ ]//g' -e 's/"//g'|sort ''' ,shell=True).decode('utf-8').strip().split('\n')
if len(wifi)>1:
wifi= [s for s in wifi if s != ""]
if wifi[0]=="":
wifi[0]="見つかりませんでした"
waitflag=0
draw.rectangle((9+tmx*5,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.text((9,tly+tmy+1)," {0:03d}/{1:03d}" .format(wificounter + 1,len(wifi)),font=fontm,fill=(55,255,255))
draw.text((mode2_coordi_xl[1],mode2_coordi_yl[1]),cur_size,font=fontss,fill=(255,255,255))
draw.text((9,tly+tmy*2+1),wifi[wificounter],font=fontm,fill=(255,255,55))
disp.display(img)
def dialog_window0(): #Opens the confirmation dialog window (the only dialog type there is)
draw.rectangle((10,tly+tmy+1,150,110),outline=(255,255,255),fill=(217,207,201))
draw.rectangle((10,tly+tmy+5,150,20),outline=(217,207,201),fill=(8,34,109))
draw.text((12,22),"確認",font=fontss,fill=(255,255,255))
draw.rectangle((20,90,70,90+tsy),outline=(100,100,100),fill=(217,207,201))
draw.rectangle((90,90,140,90+tsy),outline=(100,100,100),fill=(217,207,201))
draw.text((34,90),"はい",font=fontss,fill=(0,0,0))
draw.text((98,90),"いいえ",font=fontss,fill=(0,0,0))
disp.display(img)
def longpush_(button): #Long-press detection
global longpush
while (GPIO.input(button) == 0 and longpush !=100):
time.sleep(0.01)
longpush +=1
if longpush==100:
break
else:
continue
def dialog_loop0(txt,cmd): #Waits for the dialog selection
global dlog_zahyo,dlog_zahyo_xl,dlog_zahyo_yl,longpush
while (GPIO.input(input_OK)) == 0:
continue
while True:
time.sleep(0.0001)
if GPIO.input(input_RIGHT) == 0 :
time.sleep(0.01)
draw.rectangle((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo],dlog_zahyo_xl[dlog_zahyo]+csx,dlog_zahyo_yl[dlog_zahyo]+csy),(217,207,201))
dlog_zahyo +=1
if dlog_zahyo >1:
dlog_zahyo=0
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
longpush_(input_RIGHT)
if GPIO.input(input_LEFT) == 0 :
time.sleep(0.01)
draw.rectangle((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo],dlog_zahyo_xl[dlog_zahyo]+csx,dlog_zahyo_yl[dlog_zahyo]+csy),(217,207,201))
dlog_zahyo -=1
if dlog_zahyo <0:
dlog_zahyo=1
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
longpush_(input_LEFT)
if GPIO.input(input_OK) == 0:
time.sleep(0.05)
if dlog_zahyo==0:
subprocess.Popen(cmd ,shell=True)
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60),txt,font=fontss,fill=(0,255,0))
disp.display(img)
time.sleep(3)
draw.rectangle((0,0,160,128),(0,0,0))
time.sleep(1)
break
if dlog_zahyo==1:
mode2_default_disp()
while (GPIO.input(input_OK)) == 0:
continue
break
if (GPIO.input(input_LEFT) and GPIO.input(input_RIGHT))== 1 and longpush !=0:
longpush=0
def sc_key(): #On-screen keyboard
global longpush,mode,dialog_open,wifi,wificounter,wifi_psk,wifi_conf,waitflag
moji_in=[]
wifi_psk=["",""]
wifi_conf=subprocess.check_output('''grep ssid /etc/wpa_supplicant/wpa_supplicant.conf|sed -e 's/ssid=//g' -e 's/"//g' -e 's/psk=//g' -e 's/^[ \t]*//g' ''' ,shell=True).decode('utf-8').strip().split('\n')
wifi_conf_check=wifi[wificounter] in wifi_conf
if wifi_conf_check is True:
wifi_psk=subprocess.check_output('''grep {} -m1 -A 1 /etc/wpa_supplicant/wpa_supplicant.conf|sed -e 's/ssid=//g' -e 's/"//g' -e 's/psk=//g' -e 's/^[ \t]*//g' ''' .format(wifi[wificounter]),shell=True).decode('utf-8').strip().split('\n')
moji_in=wifi_psk[1]
shift=0
moji=["1","2","3","4","5","6","7","8","9","0","-","BS","q","w","e","r","t","y","u","i",\
"o","p","","⏎","a","s","d","f","g","h","j","k","l",":","'","`","z","x","c","v","b","n",\
"m",",",".","/","=","@"]
draw.rectangle((0,61,160,76),outline=(0,0,0),fill=(255,255,255))
for k in range(48):
k_size_x,k_size_y = draw.textsize(moji[k],fonts)
moji_center_w=(13-k_size_x)/2+1
draw.rectangle((13*(k-12*(k//12)),76+13*(k//12),13*(1+k-12*(k//12)),76+13*((k//12)+1)),outline=(0,0,0),fill=(217,207,201))
draw.text((13*(k-12*(k//12))+moji_center_w,76+13*(k//12)),moji[k],font=fonts,fill=(0,0,0))
draw.rectangle((0,76,13,76+13),outline=(217,207,201),fill=(8,34,109))
draw.text((4,76),moji[0],font=fonts,fill=(255,255,255))
moji_in=("".join(map(str,moji_in)))
draw.text((1,61),moji_in,font=fonts,fill=(0,0,0))
disp.display(img)
moji_in=list(moji_in)
sk_zahyo = 0
#sckey_coordi = sk_zahyo
while True:
time.sleep(0.001)
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
if GPIO.input(input_LEFT) == 0:
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(0,0,0),fill=(217,207,201))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(0,0,0))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(0,0,0))
sk_zahyo -= 1
if sk_zahyo==-1 or sk_zahyo==11 or sk_zahyo==23 or sk_zahyo==35:
sk_zahyo += 12
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(255,255,255))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
longpush_(input_LEFT)
if GPIO.input(input_RIGHT) == 0:
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(0,0,0),fill=(217,207,201))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(0,0,0))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(0,0,0))
sk_zahyo += 1
if sk_zahyo==12 or sk_zahyo==24 or sk_zahyo==36 or sk_zahyo==48:
sk_zahyo -= 12
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(255,255,255))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
longpush_(input_RIGHT)
if GPIO.input(input_UP) == 0:
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(0,0,0),fill=(217,207,201))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(0,0,0))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(0,0,0))
sk_zahyo -= 12
if sk_zahyo <=-1:
sk_zahyo += 48
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(255,255,255))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
longpush_(input_UP)
if GPIO.input(input_DOWN) == 0:
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(0,0,0),fill=(217,207,201))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(0,0,0))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(0,0,0))
sk_zahyo += 12
if sk_zahyo >=48:
sk_zahyo -= 48
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(255,255,255))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
longpush_(input_DOWN)
if GPIO.input(input_OK) == 0 :
if sk_zahyo !=11 and sk_zahyo !=23 and len(moji_in) < 22:
draw.rectangle((0,61,160,76),outline=(0,0,0),fill=(255,255,255))
moji_in.append(moji[sk_zahyo] )
moji_in=("".join(map(str,moji_in)))
draw.text((1,61),moji_in,font=fonts,fill=(0,0,0))
disp.display(img)
moji_in=list(moji_in)
if sk_zahyo == 11 and moji_in !=[]:
draw.rectangle((0,61,160,76),outline=(0,0,0),fill=(255,255,255))
moji_in.pop()
moji_in=("".join(map(str,moji_in)))
draw.text((1,61),moji_in,font=fonts,fill=(0,0,0))
disp.display(img)
moji_in=list(moji_in)
if sk_zahyo == 23 :
moji_in=("".join(map(str,moji_in)))
if wifi_psk[1]==moji_in and wifi_psk[1] !="" and moji_in !="":
mode2_default_disp()
waitflag=1
wpa_list=subprocess.check_output('''grep ssid /etc/wpa_supplicant/wpa_supplicant.conf|sed -e 's/ssid=//g' -e 's/[ ]//g' -e 's/"//g' ''',shell=True).decode('utf-8').strip().split('\n')
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
subprocess.call('''sudo wpa_cli -i wlan0 reconfigure''',shell=True)
if wifi[wificounter] in wpa_list:
wpa_list_index=wpa_list.index(wifi[wificounter])
subprocess.check_output('''sudo wpa_cli -i wlan0 select_network {}'''.format(wpa_list_index) ,shell=True)
else:
pass
time.sleep(6)
waitflag=0
mode2_default_disp()
mode=2
dialog_open=0
longpush_(input_OK)
break
if wifi_psk[1]==moji_in and wifi_psk[1] =="" and moji_in =="":
mode2_default_disp()
mode=2
dialog_open=0
longpush_(input_OK)
break
if wifi_psk[1] !="" and moji_in !="" and wifi_psk[1] !=moji_in:
mode2_default_disp()
waitflag=1
delconf=subprocess.check_output('''grep -A 2 -B 1 {} -n /etc/wpa_supplicant/wpa_supplicant.conf| sed -e 's/:.*//g' -e 's/-.*//g' ''' .format(wifi[wificounter]) ,shell=True).decode('utf-8').strip().split('\n')
subprocess.call('''sudo sed -i '{},{}d' /etc/wpa_supplicant/wpa_supplicant.conf ''' .format(delconf[0],delconf[len(delconf)-1]) ,shell=True)
subprocess.call('''sudo sed -i -e '$ a network={' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo sed -i -e '$ a \ ssid="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(wifi[wificounter]),shell=True)
subprocess.call('''sudo sed -i -e '$ a \ psk="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(moji_in),shell=True)
subprocess.call('''sudo sed -i -e '$ a }' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
wpa_reconfigure=subprocess.check_output('''sudo wpa_cli -i wlan0 reconfigure''',shell=True).strip().decode('utf-8')
if wpa_reconfigure=="FAIL":
delconf=subprocess.check_output('''grep -A 2 -B 1 {} -n /etc/wpa_supplicant/wpa_supplicant.conf| sed -e 's/:.*//g' -e 's/-.*//g' ''' .format(wifi[wificounter]) ,shell=True).decode('utf-8').strip().split('\n')
subprocess.call('''sudo sed -i '{},{}d' /etc/wpa_supplicant/wpa_supplicant.conf ''' .format(delconf[0],delconf[len(delconf)-1]) ,shell=True)
subprocess.call('''sudo sed -i -e '$ a network={' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo sed -i -e '$ a \ ssid="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(wifi[wificounter]),shell=True)
subprocess.call('''sudo sed -i -e '$ a \ psk="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(wifi_psk[1] ),shell=True)
subprocess.call('''sudo sed -i -e '$ a }' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
subprocess.call('''sudo wpa_cli -i wlan0 reconfigure''',shell=True)
wpalist=subprocess.check_output('''grep ssid /etc/wpa_supplicant/wpa_supplicant.conf|sed -e 's/ssid=//g' -e 's/[ ]//g' -e 's/"//g' ''',shell=True).decode('utf-8').strip().split('\n')
if wifi[wificounter] in wpalist:
wpa_list_index=wpalist.index(wifi[wificounter])
subprocess.check_output('''sudo wpa_cli -i wlan0 select_network {}'''.format(wpa_list_index) ,shell=True)
else:
pass
time.sleep(6)
waitflag=0
mode2_default_disp()
mode=2
dialog_open=0
longpush_(input_OK)
break
if wifi_psk[1] =="":
mode2_default_disp()
waitflag=1
subprocess.call('''sudo sed -i -e '$ a network={' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo sed -i -e '$ a \ ssid="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(wifi[wificounter]),shell=True)
subprocess.call('''sudo sed -i -e '$ a \ psk="{}"' /etc/wpa_supplicant/wpa_supplicant.conf''' .format(moji_in),shell=True)
subprocess.call('''sudo sed -i -e '$ a }' /etc/wpa_supplicant/wpa_supplicant.conf''' ,shell=True)
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
wpa_reconfigure=subprocess.check_output('''sudo wpa_cli -i wlan0 reconfigure''',shell=True).strip().decode('utf-8')
if wpa_reconfigure=="FAIL":
delconf=subprocess.check_output('''grep -A 2 -B 1 {} -n /etc/wpa_supplicant/wpa_supplicant.conf| sed -e 's/:.*//g' -e 's/-.*//g' ''' .format(wifi[wificounter]) ,shell=True).decode('utf-8').strip().split('\n')
subprocess.call('''sudo sed -i '{},{}d' /etc/wpa_supplicant/wpa_supplicant.conf ''' .format(delconf[0],delconf[len(delconf)-1]) ,shell=True)
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
subprocess.call('''sudo wpa_cli -i wlan0 reconfigure''',shell=True)
wpalist=subprocess.check_output('''grep ssid /etc/wpa_supplicant/wpa_supplicant.conf|sed -e 's/ssid=//g' -e 's/[ ]//g' -e 's/"//g' ''',shell=True).decode('utf-8').strip().split('\n')
if wifi[wificounter] in wpalist:
wpa_list_index=wpalist.index(wifi[wificounter])
subprocess.check_output('''sudo wpa_cli -i wlan0 select_network {}'''.format(wpa_list_index) ,shell=True)
else:
pass
time.sleep(6)
waitflag=0
mode2_default_disp()
mode=2
dialog_open=0
longpush_(input_OK)
break
if moji_in =="":
mode2_default_disp()
waitflag=1
delconf=subprocess.check_output('''grep -A 2 -B 1 {} -n /etc/wpa_supplicant/wpa_supplicant.conf| sed -e 's/:.*//g' -e 's/-.*//g' ''' .format(wifi[wificounter]) ,shell=True).decode('utf-8').strip().split('\n')
subprocess.call('''sudo sed -i '{},{}d' /etc/wpa_supplicant/wpa_supplicant.conf ''' .format(delconf[0],delconf[len(delconf)-1]) ,shell=True)
subprocess.call('''sudo ifconfig wlan0 down''',shell=True)
subprocess.call('''sudo ifconfig wlan0 up''',shell=True)
subprocess.call('''sudo wpa_cli -i wlan0 reconfigure''',shell=True)
time.sleep(6)
waitflag=0
mode2_default_disp()
mode=2
dialog_open=0
longpush_(input_OK)
break
longpush_(input_OK)
if GPIO.input(input_MODE) == 0 and shift==0:
shift=1
moji=["!","\\","#","$","%","^","&","*","(",")","_","BS","Q","W","E","R","T","Y","U","I",\
"O","P","","⏎","A","S","D","F","G","H","J","K","L",";",'"',"~","Z","X","C","V","B","N",\
"M","<",">","?","+","|"]
for k in range(48):
k_size_x,k_size_y = draw.textsize(moji[k],fonts)
moji_center_w=(13-k_size_x)/2+1
draw.rectangle((13*(k-12*(k//12)),76+13*(k//12),13*(1+k-12*(k//12)),76+13*((k//12)+1)),outline=(0,0,0),fill=(217,207,201))
if moji[k]=="_":
draw.text((13*(k-12*(k//12))+moji_center_w,76+13*(k//12)-2),moji[k],font=fonts,fill=(0,0,0))
else:
draw.text((13*(k-12*(k//12))+moji_center_w,76+13*(k//12)),moji[k],font=fonts,fill=(0,0,0))
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
if moji[sk_zahyo]=="_":
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)-2),moji[sk_zahyo],font=fonts,fill=(255,255,255))
else:
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
if GPIO.input(input_MODE) == 1 and shift==1:
shift=0
moji=["1","2","3","4","5","6","7","8","9","0","-","BS","q","w","e","r","t","y","u","i",\
"o","p"," ","⏎","a","s","d","f","g","h","j","k","l",":","'","`","z","x","c","v","b","n",\
"m",",",".","/","=","@"]
for k in range(48):
k_size_x,k_size_y = draw.textsize(moji[k],fonts)
moji_center_w=(13-k_size_x)/2+1
draw.rectangle((13*(k-12*(k//12)),76+13*(k//12),13*(1+k-12*(k//12)),76+13*((k//12)+1)),outline=(0,0,0),fill=(217,207,201))
draw.text((13*(k-12*(k//12))+moji_center_w,76+13*(k//12)),moji[k],font=fonts,fill=(0,0,0))
sk_zahyo_size_x,sk_zahyo_size_y = draw.textsize(moji[sk_zahyo],fonts)
moji_center_w=(13-sk_zahyo_size_x)/2+1
draw.rectangle((13*(sk_zahyo-12*(sk_zahyo//12)),76+13*(sk_zahyo//12),13*(1+sk_zahyo-12*(sk_zahyo//12)),76+13*((sk_zahyo//12)+1)),outline=(217,207,201),fill=(8,34,109))
draw.text((13*(sk_zahyo-12*(sk_zahyo//12))+moji_center_w,76+13*(sk_zahyo//12)),moji[sk_zahyo],font=fonts,fill=(255,255,255))
disp.display(img)
if (GPIO.input(input_LEFT) and GPIO.input(input_RIGHT) and GPIO.input(input_UP) and GPIO.input(input_DOWN) and GPIO.input(input_OK))== 1:
longpush=0
##End of initial setup##
time.sleep(1)
msg = None
mode0_default_disp()
disp.display(img)
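#Main UI loop: polls the GPIO buttons and, depending on the current mode, edits MIDI controller values, drives the SF2/SMF sequencer screen, or runs the settings and Wi-Fi menus.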
def ysynthmain():
global longpush,volume,sf2,midi,mode0_coordi,mode1_coordi,mode2_coordi,mode3_coordi,mode0_coordi_xl,mode0_coordi_yl,\
mode1_coordi_xl,mode1_coordi_yl,mode2_coordi_xl,mode2_coordi_yl,mode3_coordi_xl,mode3_coordi_yl,midicounter,\
playflag,sf2counter,sf2used,mode,midCH,midPRG,midCC7,midCC11,midCC10,midCC1,midCC91,\
midCC93,midCC94,pb1,pb2,dlog_zahyo,wifi,wificounter,dialog_open,mode0_write,waitflag,mountcheck
while True:
time.sleep(0.01)
try:
if aplaymidi.poll() is not None:
if mode == 1 and playflag[midicounter] == 1:
draw.rectangle((tmx*13,tly+tmy*3+1,160,tly+tmy*4+2),(0,0,0))
disp.display(img)
playflag = [0]*len(midi)
except:
pass
if GPIO.input(input_LEFT) == 0 and GPIO.input(input_MODE) != 0:
time.sleep(0.0001)
if mode==0:
if mode0_coordi ==0:
midCH -=1
if midCH<0:
midCH=15
draw.rectangle((tlx*4,0,tlx*6,tly),(0,0,0))
draw.text((tlx*4,0),str("{0:02}".format(midCH + 1)),font=fontl,fill=(255,255,55))
draw.rectangle((tmx*5,tly+tmy+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
draw.rectangle((tmx*18,tly+tmy+1,160,128),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
if mode0_coordi ==1:
midPRG[midCH] -=1
if midPRG[midCH] <0:
midPRG[midCH] =127
draw.rectangle((tmx*5,tly+tmy+1,65,tly+tmy*2),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xc0+midCH,midPRG[midCH]])
if mode0_coordi ==2:
midCC7[midCH] -=1
if midCC7[midCH] <0:
midCC7[midCH] =127
draw.rectangle((tmx*5,tly+tmy*2+1,65,tly+tmy*3),(0,0,0))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,7,midCC7[midCH]])
if mode0_coordi ==3:
midCC11[midCH] -=1
if midCC11[midCH] <0:
midCC11[midCH] =127
draw.rectangle((tmx*5,tly+tmy*3+1,65,tly+tmy*4),(0,0,0))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,11,midCC11[midCH]])
if mode0_coordi ==4:
midCC10[midCH] -=1
if midCC10[midCH] <0:
midCC10[midCH] =127
draw.rectangle((tmx*5,tly+tmy*4+1,65,tly+tmy*5),(0,0,0))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,10,midCC10[midCH]])
if mode0_coordi ==5:
midCC1[midCH] -=1
if midCC1[midCH] <0:
midCC1[midCH] =127
draw.rectangle((tmx*5,tly+tmy*5+1,65,tly+tmy*6),(0,0,0))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,1,midCC1[midCH]])
if mode0_coordi ==6:
midCC91[midCH] -=1
if midCC91[midCH] <0:
midCC91[midCH] =127
draw.rectangle((tmx*5,tly+tmy*6+1,65,tly+tmy*7),(0,0,0))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,91,midCC91[midCH]])
if mode0_coordi ==7:
midCC93[midCH] -=1
if midCC93[midCH] <0:
midCC93[midCH] =127
draw.rectangle((tmx*5,tly+tmy*7+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
midiout.send_message([0xb0+midCH,93,midCC93[midCH]])
mode0_write= True
if mode0_coordi ==8:
midCC94[midCH] -=1
if midCC94[midCH] <0:
midCC94[midCH] =127
draw.rectangle((tmx*18,tly+tmy+1,160,tly+tmy*2),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
midiout.send_message([0xb0+midCH,94,midCC94[midCH]])
mode0_write= True
if mode0_coordi ==9:
if pb1[midCH] > 0x00:
pb1[midCH] -= 0x01
elif pb1[midCH] == 0x00 and pb2[midCH] > 0x00:
pb2[midCH] -= 0x01
pb1[midCH] = 0x7f
elif pb1[midCH] == 0x00 and pb2[midCH] == 0x00:
pb1[midCH] = 0x7f
pb2[midCH] = 0x7f
draw.rectangle((tmx*18,tly+tmy*2+1,160,tly+tmy*3),(0,0,0))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xe0+midCH,pb1[midCH],pb2[midCH]])
if mode==1:
if mode1_coordi ==0:
sf2counter -= 1
if sf2counter == -1:
sf2counter = len(sf2) -1
draw.rectangle((tmx*5,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9+tmx*4,tly+tmy+1),"{0:03d}/{1:03d}" .format(sf2counter + 1 ,len(sf2)),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),sf2[sf2counter],font=fontm,fill=(255,255,55))
if sf2used[sf2counter]==1:
draw.text((9,tly+tmy+1)," ♪",font=fontm,fill=(55,255,255))
disp.display(img)
if mode1_coordi ==1:
midicounter -= 1
if midicounter == -1:
midicounter = len(midi)-1
draw.rectangle((tmx*5,tly+tmy*3+3,160,tly+tmy*4+2),(0,0,0))
draw.rectangle((9,tly+tmy*4+1,160,tly+tmy*5+2),(0,0,0))
draw.text((9+tmx*4,tly+tmy*3+1),"{0:03d}/{1:03d}".format(midicounter + 1 ,len(midi) ),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*4+1),midi[midicounter],font=fontm,fill=(255,255,55))
if playflag[midicounter]==1:
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
disp.display(img)
if mode==3:
wificounter -=1
if wificounter == -1:
wificounter = len(wifi) -1
draw.rectangle((9+tmx*5,tly+tmy+1,9+tmx*8,tly+tmy*2+2),(0,0,0))
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy+1)," {0:03d}" .format(wificounter+1),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),wifi[wificounter],font=fontm,fill=(255,255,55))
disp.display(img)
longpush_(input_LEFT)
if GPIO.input(input_RIGHT) == 0 and GPIO.input(input_MODE) != 0:
time.sleep(0.0001)
if mode==0:
if mode0_coordi ==0:
midCH +=1
if midCH>15:
midCH=0
draw.rectangle((tlx*4,0,tlx*6,tly),(0,0,0))
draw.text((tlx*4,0),str("{0:02}".format(midCH + 1)),font=fontl,fill=(255,255,55))
draw.rectangle((tmx*5,tly+tmy+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
draw.rectangle((tmx*18,tly+tmy+1,160,128),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
if mode0_coordi ==1:
midPRG[midCH] +=1
if midPRG[midCH] >127:
midPRG[midCH] =0
draw.rectangle((tmx*5,tly+tmy+1,65,tly+tmy*2),(0,0,0))
draw.text((tmx*5,tly+tmy+1),str("{0:03d}".format(midPRG[midCH] + 1)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xc0+midCH,midPRG[midCH]])
if mode0_coordi ==2:
midCC7[midCH] +=1
if midCC7[midCH] >127:
midCC7[midCH] =0
draw.rectangle((tmx*5,tly+tmy*2+1,65,tly+tmy*3),(0,0,0))
draw.text((tmx*5,tly+tmy*2+1),str("{0:03d}".format(midCC7[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,7,midCC7[midCH]])
if mode0_coordi ==3:
midCC11[midCH] +=1
if midCC11[midCH] >127:
midCC11[midCH] =0
draw.rectangle((tmx*5,tly+tmy*3+1,65,tly+tmy*4),(0,0,0))
draw.text((tmx*5,tly+tmy*3+1),str("{0:03d}".format(midCC11[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,11,midCC11[midCH]])
if mode0_coordi ==4:
midCC10[midCH] +=1
if midCC10[midCH] >127:
midCC10[midCH] =0
draw.rectangle((tmx*5,tly+tmy*4+1,65,tly+tmy*5),(0,0,0))
draw.text((tmx*5,tly+tmy*4+1),str("{0:03d}".format(midCC10[midCH]-64)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,10,midCC10[midCH]])
if mode0_coordi ==5:
midCC1[midCH] +=1
if midCC1[midCH] >127:
midCC1[midCH] =0
draw.rectangle((tmx*5,tly+tmy*5+1,65,tly+tmy*6),(0,0,0))
draw.text((tmx*5,tly+tmy*5+1),str("{0:03d}".format(midCC1[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,1,midCC1[midCH]])
if mode0_coordi ==6:
midCC91[midCH] +=1
if midCC91[midCH] >127:
midCC91[midCH] =0
draw.rectangle((tmx*5,tly+tmy*6+1,65,tly+tmy*7),(0,0,0))
draw.text((tmx*5,tly+tmy*6+1),str("{0:03d}".format(midCC91[midCH])),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xb0+midCH,91,midCC91[midCH]])
if mode0_coordi ==7:
midCC93[midCH] +=1
if midCC93[midCH] >127:
midCC93[midCH] =0
draw.rectangle((tmx*5,tly+tmy*7+1,65,128),(0,0,0))
draw.text((tmx*5,tly+tmy*7+1),str("{0:03d}".format(midCC93[midCH])),font=fontm,fill=(255,255,55))
midiout.send_message([0xb0+midCH,93,midCC93[midCH]])
mode0_write= True
if mode0_coordi ==8:
midCC94[midCH] +=1
if midCC94[midCH] >127:
midCC94[midCH] =0
draw.rectangle((tmx*18,tly+tmy+1,160,tly+tmy*2),(0,0,0))
draw.text((tmx*18,tly+tmy+1),str("{0:03d}".format(midCC94[midCH])),font=fontm,fill=(255,255,55))
midiout.send_message([0xb0+midCH,94,midCC94[midCH]])
mode0_write= True
if mode0_coordi ==9:
if pb1[midCH] < 0x7f:
pb1[midCH] += 0x01
elif pb1[midCH] == 0x7f and pb2[midCH] < 0x7f:
pb2[midCH] += 0x01
pb1[midCH] = 0x00
elif pb1[midCH] == 0x7f and pb2[midCH] == 0x7f:
pb1[midCH] = 0x00
pb2[midCH] = 0x00
draw.rectangle((tmx*18,tly+tmy*2+1,160,tly+tmy*3),(0,0,0))
draw.text((tmx*18,tly+tmy*2+1),str("{0:04d}".format(0x80*pb2[midCH]+pb1[midCH]-8192)),font=fontm,fill=(255,255,55))
mode0_write= True
midiout.send_message([0xe0+midCH,pb1[midCH],pb2[midCH]])
if mode==1:
if mode1_coordi ==0:
sf2counter += 1
if sf2counter == len(sf2):
sf2counter = 0
draw.rectangle((tmx*5,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9+tmx*4,tly+tmy+1),"{0:03d}/{1:03d}" .format(sf2counter + 1 ,len(sf2)),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),sf2[sf2counter],font=fontm,fill=(255,255,55))
if sf2used[sf2counter]==1:
draw.text((9,tly+tmy+1)," ♪",font=fontm,fill=(55,255,255))
disp.display(img)
if mode1_coordi ==1:
midicounter += 1
if midicounter == len(midi):
midicounter = 0
draw.rectangle((tmx*5,tly+tmy*3+3,160,tly+tmy*4+2),(0,0,0))
draw.rectangle((9,tly+tmy*4+1,160,tly+tmy*5+2),(0,0,0))
draw.text((9+tmx*4,tly+tmy*3+1),"{0:03d}/{1:03d}".format(midicounter + 1 ,len(midi) ),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*4+1),midi[midicounter],font=fontm,fill=(255,255,55))
if playflag[midicounter]==1:
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
disp.display(img)
if mode==3:
wificounter +=1
if wificounter==len(wifi):
wificounter=0
draw.rectangle((9+tmx*5,tly+tmy+1,9+tmx*8,tly+tmy*2+2),(0,0,0))
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy+1)," {0:03d}" .format(wificounter+1),font=fontm,fill=(55,255,255))
draw.text((9,tly+tmy*2+1),wifi[wificounter],font=fontm,fill=(255,255,55))
disp.display(img)
longpush_(input_RIGHT)
if GPIO.input(input_UP) == 0 and GPIO.input(input_MODE) != 0:
time.sleep(0.01)
if mode==0 and GPIO.input(input_OK) != 0:
draw.rectangle((mode0_coordi_xl[mode0_coordi],mode0_coordi_yl[mode0_coordi],mode0_coordi_xl[mode0_coordi]+csx,mode0_coordi_yl[mode0_coordi]+csy),(0,0,0))
mode0_coordi -=1
if mode0_coordi <0:
mode0_coordi=9
draw.text((mode0_coordi_xl[mode0_coordi],mode0_coordi_yl[mode0_coordi]),cur_size,font=fontss,fill=(255,255,255))
mode0_write= True
if mode==1 and GPIO.input(input_OK) != 0:
draw.rectangle((mode1_coordi_xl[mode1_coordi],mode1_coordi_yl[mode1_coordi],mode1_coordi_xl[mode1_coordi]+csx,mode1_coordi_yl[mode1_coordi]+csy),(0,0,0))
mode1_coordi -=1
if mode1_coordi <0:
mode1_coordi=1
draw.text((mode1_coordi_xl[mode1_coordi],mode1_coordi_yl[mode1_coordi]),cur_size,font=fontss,fill=(255,255,255))
disp.display(img)
if mode==2 and GPIO.input(input_OK) != 0:
draw.rectangle((mode2_coordi_xl[mode2_coordi],mode2_coordi_yl[mode2_coordi],mode2_coordi_xl[mode2_coordi]+csx,mode2_coordi_yl[mode2_coordi]+csy),(0,0,0))
mode2_coordi -=1
if mode2_coordi <0:
mode2_coordi=6
draw.text((mode2_coordi_xl[mode2_coordi],mode2_coordi_yl[mode2_coordi]),cur_size,font=fontss,fill=(255,255,255))
disp.display(img)
longpush_(input_UP)
if GPIO.input(input_DOWN) == 0 and GPIO.input(input_MODE) != 0:
time.sleep(0.01)
if mode==0 and GPIO.input(input_OK) != 0:
draw.rectangle((mode0_coordi_xl[mode0_coordi],mode0_coordi_yl[mode0_coordi],mode0_coordi_xl[mode0_coordi]+csx,mode0_coordi_yl[mode0_coordi]+csy),(0,0,0))
mode0_coordi +=1
if mode0_coordi >9:
mode0_coordi=0
draw.text((mode0_coordi_xl[mode0_coordi],mode0_coordi_yl[mode0_coordi]),cur_size,font=fontss,fill=(255,255,255))
mode0_write= True
if mode==1 and GPIO.input(input_OK) != 0:
draw.rectangle((mode1_coordi_xl[mode1_coordi],mode1_coordi_yl[mode1_coordi],mode1_coordi_xl[mode1_coordi]+csx,mode1_coordi_yl[mode1_coordi]+csy),(0,0,0))
mode1_coordi +=1
if mode1_coordi >1:
mode1_coordi=0
draw.text((mode1_coordi_xl[mode1_coordi],mode1_coordi_yl[mode1_coordi]),cur_size,font=fontss,fill=(255,255,255))
disp.display(img)
if mode==2 and GPIO.input(input_OK) != 0:
draw.rectangle((mode2_coordi_xl[mode2_coordi],mode2_coordi_yl[mode2_coordi],mode2_coordi_xl[mode2_coordi]+csx,mode2_coordi_yl[mode2_coordi]+csy),(0,0,0))
mode2_coordi +=1
if mode2_coordi >6:
mode2_coordi=0
draw.text((mode2_coordi_xl[mode2_coordi],mode2_coordi_yl[mode2_coordi]),cur_size,font=fontss,fill=(255,255,255))
disp.display(img)
longpush_(input_DOWN)
if GPIO.input(input_MODE) == 0:
time.sleep(0.0001)
if GPIO.input(input_RIGHT) == 0 and GPIO.input(input_LEFT) == 1 and GPIO.input(input_UP) == 1 and GPIO.input(input_DOWN) == 1:
dialog_open=0
time.sleep(0.01)
mode +=1
if mode >2:
mode=0
if mode==0:
mode0_default_disp()
if mode==1:
mode1_default_disp()
if sf2used[sf2counter]==1:
draw.text((9,tly+tmy+1)," ♪",font=fontm,fill=(55,255,255))
disp.display(img)
if playflag[midicounter]==1:
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
disp.display(img)
if mode==2:
mode2_default_disp()
longpush_(input_RIGHT)
if GPIO.input(input_LEFT) == 0 and GPIO.input(input_RIGHT) == 1 and GPIO.input(input_UP) == 1 and GPIO.input(input_DOWN) == 1:
dialog_open=0
time.sleep(0.01)
mode -=1
if mode <0:
mode=2
if mode==0:
mode0_default_disp()
if mode==1:
mode1_default_disp()
if sf2used[sf2counter]==1:
draw.text((9,tly+tmy+1)," ♪",font=fontm,fill=(55,255,255))
disp.display(img)
if playflag[midicounter]==1:
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
disp.display(img)
if mode==2:
mode2_default_disp()
longpush_(input_LEFT)
if GPIO.input(input_UP) == 0 and GPIO.input(input_DOWN) == 1 and GPIO.input(input_LEFT) == 1 and GPIO.input(input_RIGHT) == 1:
dialog_open=0
time.sleep(0.01)
volume +=1
if volume>100:
volume=0
subprocess.call('amixer cset numid=1 {}% > /dev/null'.format(volume) ,shell=True)
draw.rectangle((tlx*8+tsx*8,0,tlx*9+tsx*10,tsy),(0,0,0))
draw.text((tlx*8+tsx*8,0),str(volume),font=fontss,fill=(0,255,0))
if mode !=0:
disp.display(img)
if mode ==0:
mode0_write= True
longpush_(input_UP)
if GPIO.input(input_DOWN) == 0 and GPIO.input(input_UP) == 1 and GPIO.input(input_LEFT) == 1 and GPIO.input(input_RIGHT) == 1:
dialog_open=0
time.sleep(0.01)
volume -=1
if volume<0:
volume=100
subprocess.call('amixer cset numid=1 {}% > /dev/null'.format(volume) ,shell=True)
draw.rectangle((tlx*8+tsx*8,0,tlx*9+tsx*10,tsy),(0,0,0))
draw.text((tlx*8+tsx*8,0),str(volume),font=fontss,fill=(0,255,0))
if mode !=0:
disp.display(img)
if mode ==0:
mode0_write= True
longpush_(input_DOWN)
if GPIO.input(input_LEFT) == 0 and GPIO.input(input_RIGHT) == 0 and GPIO.input(input_UP) == 0 and GPIO.input(input_DOWN) == 0 and GPIO.input(input_OK) == 0:
allnoteoff()
ST7735.ST7735(port=0,cs=0,dc=12,rst=25,rotation=90,width=128,height=160,spi_speed_hz=31200000+15600000)
while GPIO.input(input_LEFT) == 0 and GPIO.input(input_RIGHT) == 0 and GPIO.input(input_UP) == 0 and GPIO.input(input_DOWN) == 0 and GPIO.input(input_OK) == 0:
continue
if GPIO.input(input_OK) == 0 and GPIO.input(input_MODE) != 0:
time.sleep(0.01)
if mode==0:
allnoteoff()
while (GPIO.input(input_OK)) == 0:
continue
if mode==1 and mode1_coordi ==0 and sf2used[sf2counter]==0 and sf2[0] != "sf2_None":
time.sleep(0.05)
sf2used = [0]*len(sf2)
sf2used[sf2counter]=1
draw.rectangle((tmx*13,tly+tmy+1,160,tly+tmy*2+2),(0,0,0))
waitflag=3
subprocess.call('sudo killall timidity',shell=True)
subprocess.call('sudo killall aplaymidi',shell=True)
playflag = [0]*len(midi)
subprocess.Popen('sudo timidity -c "/media/usb0/timidity_cfg/{}.cfg"' .format(sf2[sf2counter]),shell=True)
time.sleep(3)
subprocess.call('sh /home/pi/ysynth4/midiconnect.sh',shell=True)
draw.rectangle((9,tly+tmy*2+1,160,tly+tmy*3+2),(0,0,0))
draw.text((9,tly+tmy*2+1),"準備完了!",font=fontm,fill=(255,255,55))
waitflag=0
disp.display(img)
midPRG= [0]*16
midCC7= [100]*16
midCC11= [127]*16
midCC10= [64]*16
midCC1= [0]*16
midCC91= [40]*16
midCC93= [0]*16
midCC94= [0]*16
pb1 = [0]*16
pb2 = [0x40]*16
time.sleep(2)
mode1_default_disp()
if GPIO.input(input_OK) == 0 and mode==1 and mode1_coordi ==1 :
if playflag[midicounter]==0 and midi[0] != "midi_None":
time.sleep(0.05)
playflag = [0]*len(midi)
playflag[midicounter]=1
draw.rectangle((tmx*13,tly+tmy*3+1,160,tly+tmy*4+2),(0,0,0))
draw.text((9,tly+tmy*3+1)," ▶",font=fontm,fill=(55,255,255))
subprocess.call('sudo killall aplaymidi',shell=True)
allnoteoff()
aplaymidi = subprocess.Popen('aplaymidi -p 14:0 "/media/usb0/midi/{}.mid"' .format(midi[midicounter]),shell=True)
disp.display(img)
mode1_default_disp()
while (GPIO.input(input_OK)) == 0:
continue
if GPIO.input(input_OK) == 0 and playflag[midicounter]==1:
time.sleep(0.05)
playflag = [0]*len(midi)
draw.rectangle((tmx*13,tly+tmy*3+1,160,tly+tmy*4+2),(0,0,0))
allnoteoff()
subprocess.call('sudo killall aplaymidi',shell=True)
allnoteoff()
disp.display(img)
mode1_default_disp()
while (GPIO.input(input_OK)) == 0:
continue
if mode==2 and mode2_coordi ==0:
time.sleep(0.05)
while (GPIO.input(input_OK)) == 0:
continue
mode=3
if mode==2 and mode2_coordi ==1:
time.sleep(0.05)
dialog_window0()
if audio_card == str("IQaudIODAC") or audio_card == str("") :
draw.text((11,tly+tmy*2+1)," bcm2835に切り替えます",font=fontss,fill=(0,0,0))
draw.text((11,tly+tmy*3+1),"か?(再起動します)",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" 再起動します...","")
if dlog_zahyo==0:
subprocess.call("sudo sed -i -e '$ a dtparam=audio=on' /boot/config.txt" ,shell=True)
subprocess.call("sudo sed -i -e '/dtoverlay=iqaudio-dacplus/d' /boot/config.txt" ,shell=True)
subprocess.call("sudo sed -i -e '$ a opt B3,8' /usr/local/share/timidity/timidity.cfg" ,shell=True)
subprocess.call("sudo sed -i -e '/opt B2,8/d' /usr/local/share/timidity/timidity.cfg" ,shell=True)
subprocess.call("sudo reboot" ,shell=True)
if audio_card == str("bcm2835"):
draw.text((11,tly+tmy*2+1)," IQaudIODACに切り替えま",font=fontss,fill=(0,0,0))
draw.text((11,tly+tmy*3+1),"すか?(再起動します)",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" 再起動します...","")
if dlog_zahyo==0:
subprocess.call("sudo sed -i -e '$ a dtoverlay=iqaudio-dacplus' /boot/config.txt" ,shell=True)
subprocess.call("sudo sed -i -e '/dtparam=audio=on/d' /boot/config.txt" ,shell=True)
subprocess.call("sudo sed -i -e '$ a opt B2,8' /usr/local/share/timidity/timidity.cfg" ,shell=True)
subprocess.call("sudo sed -i -e '/opt B3,8/d' /usr/local/share/timidity/timidity.cfg" ,shell=True)
subprocess.call("sudo reboot" ,shell=True)
if mode==2 and mode2_coordi ==2:
time.sleep(0.05)
dialog_window0()
if mountcheck != str("/media/usb0"):
draw.text((11,tly+tmy*2+1)," 認識させますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" 認識します...","sudo mount -t vfat -o ,iocharset=utf8 /dev/sda1 /media/usb0")
if dlog_zahyo==0:
subprocess.call('rename.ul .MID .mid /media/usb0/midi/*' ,shell=True)
subprocess.call('rename.ul .SF2 .sf2 /media/usb0/sf2/*' ,shell=True)
subprocess.call('rename.ul .CFG .cfg /media/usb0/timidity_cfg/*' ,shell=True)
midi = subprocess.check_output('find /media/usb0/midi/ -name \*.mid|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/midi/','').replace('.mid','').replace('.MID','').split('\n')
playflag = [0]*len(midi)
if midi[0]=='':
midi= ["midi_None"]
midicounter=0
sf2 = subprocess.check_output('find /media/usb0/sf2/ -name \*.sf2|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/sf2/','').replace('.sf2','').replace('.SF2','').split('\n')
sf2used = [0]*len(sf2)
if sf2[0]=='':
sf2 = ["sf2_None"]
sf2counter = 0
cfg = subprocess.check_output('find /media/usb0/timidity_cfg/ -name \*.cfg|sort' ,shell=True).decode('utf-8').strip().replace('/media/usb0/timidity_cfg/','').replace('.cfg','').split('\n')
if (sf2 != cfg) and (sf2[0] != "sf2_None"):
list_difference = list(set(cfg) - set(sf2))
for x in range(len(list_difference)):
subprocess.call('sudo rm /media/usb0/timidity_cfg/{}.cfg' .format(list_difference[x]) ,shell=True)
list_difference = list(set(sf2) - set(cfg))
for x in range(len(list_difference)):
subprocess.call('''sudo /home/pi/ysynth4/cfgforsf -C "/media/usb0/sf2/{sf2name}.sf2" | sed -e 's/(null)//' -e 's/^[ ]*//g' -e '/(null)#/d' -e /^#/d | grep -C 1 % | sed -e '/--/d' -e /^$/d > "/media/usb0/timidity_cfg/{sf2name}.cfg"''' .format(sf2name=list_difference[x]) ,shell=True)
if sf2[0] == "sf2_None":
subprocess.call('sudo rm "/home/pi/timidity_cfg/*.cfg"' ,shell=True)
time.sleep(2)
dlog_zahyo=1
mountcheck=subprocess.check_output("mount|grep -m1 /dev/sda|awk '{print $3}'" ,shell=True).decode('utf-8').strip()
mode2_default_disp()
continue
if mountcheck == str("/media/usb0"):
draw.text((11,tly+tmy*2+1)," 取り出しますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" 取り出します...","sudo umount /media/usb0/")
if dlog_zahyo==0:
subprocess.call('sudo killall timidity',shell=True)
subprocess.call('sudo killall aplaymidi',shell=True)
midi= ["midi_None"]
midicounter=0
playflag = [0]
sf2 = ["sf2_None"]
sf2counter = 0
sf2used = [0]
cfg = [ ]
dlog_zahyo=1
while mountcheck == str("/media/usb0"):
mountcheck=subprocess.check_output("mount|grep -m1 /dev/sda|awk '{print $3}'" ,shell=True).decode('utf-8').strip()
mode2_default_disp()
if mode==2 and mode2_coordi ==3:
time.sleep(0.05)
dialog_window0()
draw.text((11,tly+tmy*2+1)," アップデートしますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
subprocess.call("sudo rm /home/pi/ysynth4/ysynth4.py.*" ,shell=True)
dialog_loop0(" ダウンロード中...","wget https://raw.githubusercontent.com/YoutechA320U/ysynth4/master/ysynth4.py -P /home/pi/ysynth4/")
latest_dl =int(subprocess.check_output("test -f /home/pi/ysynth4/ysynth4.py.1;echo $?" ,shell=True).decode('utf-8').strip())
if latest_dl == 1 and dlog_zahyo==0:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," ダウンロード失敗",font=fontss,fill=(0,255,0))
disp.display(img)
dlog_zahyo=1
time.sleep(2)
mode2_default_disp()
if latest_dl ==0:
download_v=float(subprocess.check_output("grep -m1 version= /home/pi/ysynth4/ysynth4.py.1|awk '{print $2;}'" ,shell=True).decode('utf-8').strip().replace('\nysynth4/ysynth4.py.1|grep',''))
if download_v > version:
draw.rectangle((0,0,160,128),(0,0,0))
waitflag=4
subprocess.call('sudo chown -R pi:pi /home/pi/' ,shell=True)
subprocess.call("sudo mv -f /home/pi/ysynth4/ysynth4.py.1 /home/pi/ysynth4/ysynth4.py" ,shell=True)
subprocess.call("wget https://raw.githubusercontent.com/YoutechA320U/ysynth4/master/setup.sh -P /home/pi/ysynth4/" ,shell=True)
subprocess.call("sudo mv -f /home/pi/ysynth4/setup.sh.1 /home/pi/ysynth4/setup.sh" ,shell=True)
subprocess.call("sudo sh /home/pi/ysynth4/setup.sh" ,shell=True)
waitflag=0
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," リロードします...",font=fontss,fill=(0,255,0))
disp.display(img)
subprocess.call('sudo systemctl restart ysynth4.service',shell=True)
if download_v <= version:
draw.rectangle((0,0,160,128),(0,0,0))
draw.text((3,60)," 最新のバージョンです",font=fontss,fill=(0,255,0))
subprocess.call("sudo rm /home/pi/ysynth4/ysynth4.py.*" ,shell=True)
disp.display(img)
dlog_zahyo=1
time.sleep(2)
mode2_default_disp()
if mode==2 and mode2_coordi ==4:
time.sleep(0.05)
dialog_window0()
draw.text((11,tly+tmy*2+1)," 再起動しますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" 再起動します...","sudo reboot")
if mode==2 and mode2_coordi ==5:
time.sleep(0.05)
dialog_window0()
draw.text((11,tly+tmy*2+1),"シャットダウンしますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" シャットダウンします...","sudo shutdown -h now")
if mode==2 and mode2_coordi ==6:
time.sleep(0.05)
dialog_window0()
draw.text((11,tly+tmy*2+1)," リロードしますか?",font=fontss,fill=(0,0,0))
draw.text((dlog_zahyo_xl[dlog_zahyo],dlog_zahyo_yl[dlog_zahyo]),cur_size,font=fontss,fill=(0,0,0))
disp.display(img)
dialog_loop0(" リロードします...","sudo systemctl restart ysynth4.service")
if mode==3 and dialog_open !=1:
mode3_default_disp()
dialog_open=1
if mode==3 and dialog_open ==1:
if GPIO.input(input_OK) == 0 and wifi[0] !="見つかりませんでした":
time.sleep(0.05)
sc_key()
if (GPIO.input(input_LEFT) and GPIO.input(input_RIGHT) and GPIO.input(input_UP) and GPIO.input(input_DOWN) and GPIO.input(input_OK))== 1:
longpush=0
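#mode0_write is set by both the MIDI-input handler and the UI loop; this worker pushes the shared image to the display once per change while mode 0 is active.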
def mode0_write_():
global mode0_write
while True:
time.sleep(0.0001)
if mode0_write is True and mode==0:
mode0_write= False
disp.display(img)
#time.sleep(0.01)
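#Run the MIDI display mirror, the main UI loop, and the mode-0 refresh worker on separate threads.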
thread2 = threading.Thread(target=mididisp)
thread3 = threading.Thread(target=ysynthmain)
thread4 = threading.Thread(target=mode0_write_)
thread2.start()
thread3.start()
thread4.start()
| 51.775229
| 302
| 0.575466
| 12,102
| 79,009
| 3.654603
| 0.044125
| 0.013657
| 0.009632
| 0.014086
| 0.854956
| 0.835805
| 0.821832
| 0.804377
| 0.784277
| 0.773537
| 0
| 0.106775
| 0.229151
| 79,009
| 1,525
| 303
| 51.80918
| 0.619122
| 0.019188
| 0
| 0.654874
| 0
| 0.018051
| 0.110703
| 0.031975
| 0
| 0
| 0.002791
| 0
| 0
| 1
| 0.010108
| false
| 0.00361
| 0.00722
| 0
| 0.017329
| 0.004332
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
46b0ebcb22b50425854864c846ff98e810ec7c7e
| 87
|
py
|
Python
|
python/testData/debug/test_exceptbreak.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/debug/test_exceptbreak.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/debug/test_exceptbreak.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 1
|
2020-11-27T10:36:50.000Z
|
2020-11-27T10:36:50.000Z
|
def foo(x):
return 1/x
def zoo(x):
res = foo(x)
return res
print(zoo(0))
| 9.666667
| 16
| 0.54023
| 17
| 87
| 2.764706
| 0.529412
| 0.170213
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032787
| 0.298851
| 87
| 8
| 17
| 10.875
| 0.737705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d3c6c109ab642a395cfbb79ecb252cfc7b21c345
| 29
|
py
|
Python
|
apps/event/__init__.py
|
dy1zan/softwarecapstone
|
c121a2b2d43b72aac19b75c31519711c0ace9c02
|
[
"MIT"
] | null | null | null |
apps/event/__init__.py
|
dy1zan/softwarecapstone
|
c121a2b2d43b72aac19b75c31519711c0ace9c02
|
[
"MIT"
] | 16
|
2018-11-10T21:46:40.000Z
|
2018-11-11T15:08:36.000Z
|
apps/event/__init__.py
|
dy1zan/softwarecapstone
|
c121a2b2d43b72aac19b75c31519711c0ace9c02
|
[
"MIT"
] | null | null | null |
from .apps import EventConfig
| 29
| 29
| 0.862069
| 4
| 29
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d3faa1a242222c26692ccd00de6f89748b4b4dcc
| 26
|
py
|
Python
|
ext/workflows/__init__.py
|
FNLF/fnlf-backend
|
060d675d7cf8d0eff46af6eb4be7035b8cd68d36
|
[
"MIT"
] | 1
|
2015-01-14T22:08:27.000Z
|
2015-01-14T22:08:27.000Z
|
ext/workflows/__init__.py
|
FNLF/fnlf-backend
|
060d675d7cf8d0eff46af6eb4be7035b8cd68d36
|
[
"MIT"
] | 103
|
2015-01-08T13:45:38.000Z
|
2022-01-13T00:38:39.000Z
|
ext/workflows/__init__.py
|
FNLF/fnlf-backend
|
060d675d7cf8d0eff46af6eb4be7035b8cd68d36
|
[
"MIT"
] | null | null | null |
from . import observation
| 13
| 25
| 0.807692
| 3
| 26
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 2
| 25
| 13
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
31332909d815e7f351d2d4f1f76f86ba0ab1068d
| 18,551
|
py
|
Python
|
custom_components/miele/devcap.py
|
astrandb/miele
|
3a22099daf85c5dc80b0c07f5ad69ed2c13204e8
|
[
"MIT"
] | 13
|
2022-01-06T05:48:39.000Z
|
2022-03-31T22:23:33.000Z
|
custom_components/miele/devcap.py
|
astrandb/miele
|
3a22099daf85c5dc80b0c07f5ad69ed2c13204e8
|
[
"MIT"
] | 25
|
2022-01-22T14:48:29.000Z
|
2022-03-31T21:33:07.000Z
|
custom_components/miele/devcap.py
|
astrandb/miele
|
3a22099daf85c5dc80b0c07f5ad69ed2c13204e8
|
[
"MIT"
] | 2
|
2022-01-20T22:43:22.000Z
|
2022-03-25T13:18:24.000Z
|
"""Device capabilities."""
# API Version 1.0.5
DEV_TYPES = {
1: "Washing Machine",
2: "Thumble Dryer",
7: "Dishwasher",
19: "Fridge",
20: "Freezer",
74: "TwoInOne Hob",
}
STATE_CAPABILITIES = {
19: {
"ProgramID",
"status",
"programType",
"targetTemperature",
"temperature",
"signalInfo",
"signalFailure",
"signalDoor",
"remoteEnable",
},
20: {
"ProgramID",
"status",
"programType",
"targetTemperature",
"temperature",
"signalInfo",
"signalFailure",
"signalDoor",
"remoteEnable",
},
}
ACTION_CAPABILITIES = {
19: {"targetTemperature", "startSupercooling"},
20: {"targetTemperature", "startSuperfreezing"},
}
LIVE_ACTION_CAPABILITIES = {
"711934968": {
"processAction": [4],
"light": [],
"ambientLight": [],
"startTime": [],
"ventilationStep": [],
"programId": [],
"targetTemperature": [{"zone": 1, "min": -26, "max": -16}],
"deviceName": True,
"powerOn": False,
"powerOff": False,
"colors": [],
"modes": [1],
},
"711944869": {
"processAction": [6],
"light": [],
"ambientLight": [],
"startTime": [],
"ventilationStep": [],
"programId": [],
"targetTemperature": [{"zone": 1, "min": 1, "max": 9}],
"deviceName": True,
"powerOn": False,
"powerOff": False,
"colors": [],
"modes": [1],
},
}
TEST_DATA_7 = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 7,
"value_localized": "Dishwasher",
},
"deviceName": "",
"protocolVersion": 2,
"deviceIdentLabel": {
"fabNumber": "<fabNumber1>",
"fabIndex": "64",
"techType": "G6865-W",
"matNumber": "<matNumber1>",
"swids": [
"<swid1>",
"<swid2>",
"<swid3>",
"<...>",
],
},
"xkmIdentLabel": {"techType": "EK039W", "releaseVersion": "02.72"},
},
"state": {
"ProgramID": {
"value_raw": 38,
"value_localized": "QuickPowerWash",
"key_localized": "Program name",
},
"status": {
"value_raw": 5,
"value_localized": "In use",
"key_localized": "status",
},
"programType": {
"value_raw": 2,
"value_localized": "Automatic programme",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 1799,
"value_localized": "Drying",
"key_localized": "Program phase",
},
"remainingTime": [0, 15],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"}
],
"temperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": False,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": None,
"light": None,
"elapsedTime": [0, 59],
"spinningSpeed": {
"unit": "rpm",
"value_raw": None,
"value_localized": None,
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Fan level",
},
"plateStep": [],
"ecoFeedback": {
"currentWaterConsumption": {
"unit": "l",
"value": 12,
},
"currentEnergyConsumption": {
"unit": "kWh",
"value": 1.4,
},
"waterForecast": 0.2,
"energyForecast": 0.1,
},
"batteryLevel": None,
},
}
TEST_DATA_18 = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 18,
"value_localized": "Cooker Hood",
},
"deviceName": "",
"protocolVersion": 2,
"deviceIdentLabel": {
"fabNumber": "<fabNumber3>",
"fabIndex": "64",
"techType": "Fläkt",
"matNumber": "<matNumber3>",
"swids": [
"<swid1>",
"<swid2>",
"<swid3>",
"<...>",
],
},
"xkmIdentLabel": {"techType": "EK039W", "releaseVersion": "02.72"},
},
"state": {
"ProgramID": {
"value_raw": 1,
"value_localized": "Off",
"key_localized": "Program name",
},
"status": {
"value_raw": 1,
"value_localized": "Off",
"key_localized": "status",
},
"programType": {
"value_raw": 0,
"value_localized": "Program",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 4608,
"value_localized": "",
"key_localized": "Program phase",
},
"remainingTime": [0, 0],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"}
],
"temperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": False,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": 2,
"light": 2,
"elapsedTime": {},
"spinningSpeed": {
"unit": "rpm",
"value_raw": None,
"value_localized": None,
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": 0,
"value_localized": "0",
"key_localized": "Fan level",
},
"plateStep": [],
"ecoFeedback": None,
"batteryLevel": None,
},
}
TEST_DATA_21 = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 21,
"value_localized": "Fridge freezer",
},
"deviceName": "",
"protocolVersion": 203,
"deviceIdentLabel": {
"fabNumber": "**REDACTED**",
"fabIndex": "00",
"techType": "KFN 7734 D",
"matNumber": "11642200",
"swids": ["000"],
},
"xkmIdentLabel": {
"techType": "EK037LHBM",
"releaseVersion": "32.15",
},
},
"state": {
"ProgramID": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program name",
},
"status": {
"value_raw": 5,
"value_localized": "In use",
"key_localized": "status",
},
"programType": {
"value_raw": 0,
"value_localized": "Program",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program phase",
},
"remainingTime": [0, 0],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": 500, "value_localized": 5.0, "unit": "Celsius"},
{"value_raw": -1800, "value_localized": -18.0, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"temperature": [
{"value_raw": 493, "value_localized": 4.93, "unit": "Celsius"},
{"value_raw": -1807, "value_localized": -18.07, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": False,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": None,
"light": None,
"elapsedTime": [],
"spinningSpeed": {
"unit": "rpm",
"value_raw": None,
"value_localized": "",
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Fan level",
},
"plateStep": [],
"ecoFeedback": None,
"batteryLevel": None,
},
}
TEST_DATA_24 = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 24,
"value_localized": "Washer dryer",
},
"deviceName": "",
"protocolVersion": 4,
"deviceIdentLabel": {
"fabNumber": "<fabNumber2>",
"fabIndex": "32",
"techType": "WTR870",
"matNumber": "<matNumber2>",
"swids": [
"<swid1>",
"<swid2>",
"<swid3>",
"<...>",
],
},
"xkmIdentLabel": {"techType": "EK037", "releaseVersion": "03.88"},
},
"state": {
"ProgramID": {
"value_raw": 3,
"value_localized": "Minimum iron",
"key_localized": "Program name",
},
"status": {"value_raw": 1, "value_localized": "Off", "key_localized": "status"},
"programType": {
"value_raw": 1,
"value_localized": "Own programme",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 256,
"value_localized": "",
"key_localized": "Program phase",
},
"remainingTime": [1, 59],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": 3000, "value_localized": 30, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"temperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": True,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": None,
"light": None,
"elapsedTime": [0, 0],
"spinningSpeed": {
"unit": "rpm",
"value_raw": 1000,
"value_localized": "1000",
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Fan level",
},
"plateStep": [],
"ecoFeedback": None,
"batteryLevel": None,
},
}
TEST_DATA_74 = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 74,
"value_localized": "",
},
"deviceName": "",
"protocolVersion": 203,
"deviceIdentLabel": {
"fabNumber": "**REDACTED**",
"fabIndex": "00",
"techType": "KMDA7634",
"matNumber": "",
"swids": ["000"],
},
"xkmIdentLabel": {
"techType": "EK039W",
"releaseVersion": "02.72",
},
},
"state": {
"ProgramID": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program name",
},
"status": {
"value_raw": 5,
"value_localized": "In use",
"key_localized": "status",
},
"programType": {
"value_raw": 0,
"value_localized": "Program",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program phase",
},
"remainingTime": [0, 0],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"temperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": False,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": None,
"light": None,
"elapsedTime": [],
"spinningSpeed": {
"unit": "rpm",
"value_raw": None,
"value_localized": "",
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Fan level",
},
"plateStep": [
{"value_raw": 0, "value_localized": 0, "key_localized": "Power level"},
{"value_raw": 3, "value_localized": 2, "key_localized": "Power level"},
{"value_raw": 7, "value_localized": 4, "key_localized": "Power level"},
{"value_raw": 15, "value_localized": 8, "key_localized": "Power level"},
{"value_raw": 117, "value_localized": 10, "key_localized": "Power level"},
],
"ecoFeedback": None,
"batteryLevel": None,
},
}
TEST_DATA_TEMPLATE = {
"ident": {
"type": {
"key_localized": "Device type",
"value_raw": 0,
"value_localized": "Template",
},
"deviceName": "",
"protocolVersion": 203,
"deviceIdentLabel": {
"fabNumber": "**REDACTED**",
"fabIndex": "00",
"techType": "",
"matNumber": "",
"swids": ["000"],
},
"xkmIdentLabel": {
"techType": "",
"releaseVersion": "",
},
},
"state": {
"ProgramID": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program name",
},
"status": {
"value_raw": 5,
"value_localized": "In use",
"key_localized": "status",
},
"programType": {
"value_raw": 0,
"value_localized": "Program",
"key_localized": "Program type",
},
"programPhase": {
"value_raw": 0,
"value_localized": "",
"key_localized": "Program phase",
},
"remainingTime": [0, 0],
"startTime": [0, 0],
"targetTemperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"temperature": [
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
{"value_raw": -32768, "value_localized": None, "unit": "Celsius"},
],
"signalInfo": False,
"signalFailure": False,
"signalDoor": False,
"remoteEnable": {
"fullRemoteControl": True,
"smartGrid": False,
"mobileStart": False,
},
"ambientLight": None,
"light": None,
"elapsedTime": [],
"spinningSpeed": {
"unit": "rpm",
"value_raw": None,
"value_localized": "",
"key_localized": "Spin speed",
},
"dryingStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Drying level",
},
"ventilationStep": {
"value_raw": None,
"value_localized": "",
"key_localized": "Fan level",
},
"plateStep": [],
"ecoFeedback": None,
"batteryLevel": None,
},
}
TEST_ACTION_21 = {
"processAction": [4, 6],
"light": [],
"ambientLight": [],
"startTime": [],
"ventilationStep": [],
"programId": [],
"targetTemperature": [
{"zone": 1, "min": 1, "max": 9},
{"zone": 2, "min": -26, "max": -16},
],
"deviceName": True,
"powerOn": False,
"powerOff": True,
"colors": [],
"modes": [1],
"programs": [],
"id_log": [],
}
| 29.399366
| 88
| 0.44596
| 1,331
| 18,551
| 6.033058
| 0.132983
| 0.084682
| 0.065006
| 0.060523
| 0.840598
| 0.80274
| 0.770112
| 0.75878
| 0.70797
| 0.624035
| 0
| 0.039403
| 0.378901
| 18,551
| 630
| 89
| 29.446032
| 0.657525
| 0.002102
| 0
| 0.678571
| 0
| 0
| 0.400357
| 0.00254
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
313b0c39616ef4195670de4e815f8d591baba29d
| 173
|
py
|
Python
|
c3i/app/views.py
|
addinall/python-C3I
|
be72f026fb7c6b5084404876cd1296d3c3cb9b85
|
[
"Unlicense"
] | null | null | null |
c3i/app/views.py
|
addinall/python-C3I
|
be72f026fb7c6b5084404876cd1296d3c3cb9b85
|
[
"Unlicense"
] | null | null | null |
c3i/app/views.py
|
addinall/python-C3I
|
be72f026fb7c6b5084404876cd1296d3c3cb9b85
|
[
"Unlicense"
] | null | null | null |
# from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return HttpResponse("Hello world - You are in the Python version of C3I")
| 21.625
| 74
| 0.780347
| 25
| 173
| 5.4
| 0.84
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.156069
| 173
| 7
| 75
| 24.714286
| 0.917808
| 0.202312
| 0
| 0
| 0
| 0
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3144c191e0819aad0867bf838ebb824e059d71f9
| 24,938
|
py
|
Python
|
ph5/utilities/tests/test_validation.py
|
PIC-IRIS/PH5
|
4629c0694caf7043c39beced4409af58f18d4751
|
[
"MIT"
] | 21
|
2016-12-07T20:09:31.000Z
|
2022-03-07T22:23:57.000Z
|
ph5/utilities/tests/test_validation.py
|
PIC-IRIS/PH5
|
4629c0694caf7043c39beced4409af58f18d4751
|
[
"MIT"
] | 395
|
2016-11-03T03:43:55.000Z
|
2022-03-08T20:54:22.000Z
|
ph5/utilities/tests/test_validation.py
|
PIC-IRIS/PH5
|
4629c0694caf7043c39beced4409af58f18d4751
|
[
"MIT"
] | 6
|
2016-10-25T22:22:38.000Z
|
2021-05-10T18:19:45.000Z
|
'''
Tests for validation
'''
import unittest
import os
import logging
import sys
from mock import patch
from testfixtures import LogCapture
from ph5.core import ph5api
from ph5 import logger
from ph5.core.tests.test_base import LogTestCase, TempDirTestCase
from ph5.utilities import validation, segd2ph5
class TestValidation_response(LogTestCase, TempDirTestCase):
def setUp(self):
super(TestValidation_response, self).setUp()
ph5path = os.path.join(self.home, "ph5/test_data/ph5")
self.ph5API_object = ph5api.PH5(path=ph5path, nickname='master.ph5')
self.resp_check_info = [
{'n_i': 5, 'array': '002', 'sta': '0407',
'cha_code': 'HHN', 'spr': 200, 'sprm': 1, 'cha_id': 1,
'smodel': 'None CMG-3T', 'dmodel': 'None Q330'},
{'n_i': 1, 'array': '008', 'sta': '8001',
'cha_code': 'HLZ', 'spr': 100, 'sprm': 1, 'cha_id': 1,
'smodel': 'cmg-3t', 'dmodel': 'rt130'},
{'n_i': 4, 'array': '009', 'sta': '9001',
'cha_code': 'DPZ', 'spr': 500, 'sprm': 1, 'cha_id': 1,
'smodel': 'gs11v', 'dmodel': 'rt125a'}]
def tearDown(self):
self.ph5API_object.close()
super(TestValidation_response, self).tearDown()
def test_check_resp_data(self):
checked_data_files = {}
header = "array 008, station 8001, channel 1: "
# data has been loaded for response file rt130_100_1_1
validation.check_resp_data(
self.ph5API_object.ph5,
'/Experiment_g/Responses_g/rt130_100_1_1',
header, checked_data_files, 1)
self.assertEqual(checked_data_files,
{'rt130_100_1_1': ''})
# data has NOT been loaded for response file rt130_200_1_1
with self.assertRaises(Exception) as contxt:
validation.check_resp_data(
self.ph5API_object.ph5,
'/Experiment_g/Responses_g/rt130_200_1_1',
header, checked_data_files, 1)
self.assertEqual(
contxt.exception.message,
('%sResponse_t[1]:No response data loaded for '
'rt130_200_1_1.' % header))
self.assertEqual(
checked_data_files,
{'rt130_200_1_1': ('%sResponse_t[1]:No response data loaded for '
'rt130_200_1_1.' % header),
'rt130_100_1_1': ''})
# data has been loaded for response file cmg3t
validation.check_resp_data(
self.ph5API_object.ph5,
'/Experiment_g/Responses_g/cmg3t',
header, checked_data_files, 1)
self.assertEqual(
checked_data_files,
{'rt130_200_1_1': ('%sResponse_t[1]:No response data loaded for '
'rt130_200_1_1.' % header),
'rt130_100_1_1': '', 'cmg3t': ''})
# data has NOT been loaded for response file cmg
with self.assertRaises(Exception) as contxt:
validation.check_resp_data(
self.ph5API_object.ph5,
'/Experiment_g/Responses_g/cmg',
header, checked_data_files, 1)
self.assertEqual(
contxt.exception.message,
'%sResponse_t[1]:No response data loaded for cmg.' % header)
self.assertEqual(
checked_data_files,
{'rt130_200_1_1': ('%sResponse_t[1]:No response data loaded for '
'rt130_200_1_1.' % header),
'cmg': ('%sResponse_t[1]:No response data loaded for cmg.'
% header),
'rt130_100_1_1': '', 'cmg3t': ''})
def test_check_metadatatoph5_format(self):
errors = set()
self.ph5API_object.read_response_t()
info = next(item for item in self.resp_check_info if item["n_i"] == 4)
header = "array 009, station 9001, channel 1: "
info['dmodel_no_special_char'] = info['dmodel'].translate(None,
' ,/-=._')
info['smodel_no_special_char'] = info['smodel'].translate(None,
' ,/-=._')
# n_i=4: response_file_das_a has more parts than the metadatatoph5 format
# => for sure not created by metadatatoph5
# => return False, no err logged
Response_t = self.ph5API_object.get_response_t_by_n_i(4)
with LogCapture() as log:
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertFalse(ret)
self.assertEqual(log.records, [])
self.assertEqual(errors, set())
# n_i=5 correct metadata format: 'NoneQ330_NoneCMG3T_200HHN'
# => return True, no err logged
Response_t = self.ph5API_object.get_response_t_by_n_i(5)
info = next(item for item in self.resp_check_info if item["n_i"] == 5)
info['dmodel_no_special_char'] = info['dmodel'].translate(None,
' ,/-=._')
info['smodel_no_special_char'] = info['smodel'].translate(None,
' ,/-=._')
header = "array 002, station 0407, channel 1: "
with LogCapture() as log:
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertTrue(ret)
self.assertEqual(log.records, [])
self.assertEqual(errors, set())
# n_i=5 sensor model mismatch => 2 parts correct
# => for sure created by metadatatoph5
# => return True, err logged for sensor model inconsistent
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_CMG3T_200HHN'
err = ("array 002, station 0407, channel 1: Response_t[5]:"
"response_file_das_a 'NoneQ330_CMG3T_200HHN' is inconsistent "
"with Array_t_002:sensor_model=None CMG-3T. Please check with "
"metadatatoph5 format [das_model]_[sensor_model]_[sr][cha] "
"(check doesn't include [cha]).")
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertTrue(ret)
self.assertEqual(len(log.records), 1)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
# n_i=5 sensor model and sample rate mismatch => only 1 part correct
# => not sure created by metadatatoph5
# => return failed check for sensor model and sample rate
errors = set()
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_CMG3T_100HHN'
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertEqual(ret, ('', set(['spr', 'smodel'])))
self.assertEqual(len(log.records), 0)
self.assertEqual(errors, set())
# n_i=5 response_file_das_a lack of 1 part, sensor model mismatch
# => only 1 part correct
# => not sure created by metadatatoph5
# => return incomplete and failed check for sensor model
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_CMG3T'
with LogCapture() as log:
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertEqual(ret, ('incomplete', set(['smodel'])))
self.assertEqual(len(log.records), 0)
self.assertEqual(errors, set())
# n_i=5 response_file_das_a lack of 1 part
# => 2 parts correct
# => return True, log error for incomplete filename
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_NoneCMG3T'
err = ("array 002, station 0407, channel 1: Response_t[5]:"
"response_file_das_a 'NoneQ330_NoneCMG3T' is incomplete. "
"Please check with metadatatoph5 format "
"[das_model]_[sensor_model]_[sr][cha] "
"(check doesn't include [cha])."
)
with LogCapture() as log:
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertTrue(ret)
self.assertEqual(len(log.records), 1)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
errors = set()
# complicated sensor model
# => return True, no error logged
Response_t['response_file_das_a'] = (
'/Experiment_g/Responses_g/'
'RT130_L28LB45Hz270VmsRc395OhmsRs2490Ohms_500DH2')
info['smodel'] = ('L-28LB, 4.5 Hz, 27.0 V/m/s, '
'Rc=395 Ohms, Rs=2490 Ohms')
info['dmodel'] = info['dmodel_no_special_char'] = 'RT130'
info['spr'] = 500
info['smodel_no_special_char'] = info['smodel'].translate(None,
' ,/-=._')
with LogCapture() as log:
ret = validation.check_metadatatoph5_format(
Response_t, info, header, errors, logger)
self.assertEqual(ret, True)
self.assertEqual(log.records, [])
self.assertEqual(errors, set())
def test_check_das_resp_load_format(self):
errors = set()
self.ph5API_object.read_response_t()
Response_t = self.ph5API_object.get_response_t_by_n_i(4)
info = next(item for item in self.resp_check_info if item["n_i"] == 4)
info['dmodel_no_special_char'] = info['dmodel'].translate(None,
' ,/-=._')
header = "array 009, station 9001, channel 1: "
# n_i=4 response_das_file_name is 'rt125a_500_1_32'
with LogCapture() as log:
validation.check_das_resp_load_format(
Response_t, info, header, errors, logger, True)
self.assertEqual(len(log.records), 0)
self.assertEqual(errors, set())
# n_i=4: sample rate mismatch
info['spr'] = 100
err = ("array 009, station 9001, channel 1: Response_t[4]:"
"response_file_das_a 'rt125a_500_1_32' is inconsistent "
"with Array_t_009:sr=100. Please check with resp_load format "
"[das_model]_[sr]_[srm]_[gain].")
with LogCapture() as log:
log.setLevel(logging.ERROR)
validation.check_das_resp_load_format(
Response_t, info, header, errors, logger, False)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
Response_t = self.ph5API_object.get_response_t_by_n_i(5)
info = next(item for item in self.resp_check_info if item["n_i"] == 5)
header = "array 002, station 0407, channel 1: "
info['dmodel_no_special_char'] = info['dmodel'].translate(None,
' ,/-=._')
# n_i=5 sensor model and sample rate mismatch
errors = set()
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_CMG3T_100HHN'
err = ("array 002, station 0407, channel 1: Response_t[5]:"
"response_file_das_a NoneQ330_CMG3T_100HHN is incomplete or "
"inconsistent with "
"Array_t_002:sr=200 Array_t_002:sensor_model=None CMG-3T "
"Array_t_002:srm=1. Please check with resp_load format "
"[das_model]_[sr]_[srm]_[gain] or metadatatoph5 format "
"[das_model]_[sensor_model]_[sr][cha] "
"(check doesn't include [cha]).")
with LogCapture() as log:
log.setLevel(logging.ERROR)
validation.check_das_resp_load_format(
Response_t, info, header, errors, logger,
('', set(['spr', 'smodel']))
)
self.assertEqual(len(log.records), 1)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
# n_i=5 sensor model mismatch,
# metadatatoph5 format but lack of last part
errors = set()
Response_t['response_file_das_a'] = \
'/Experiment_g/Responses_g/NoneQ330_CMG3T'
err = ("array 002, station 0407, channel 1: "
"Response_t[5]:response_file_das_a NoneQ330_CMG3T is "
"incomplete or inconsistent with Array_t_002:sr=200 "
"Array_t_002:sensor_model=None CMG-3T. "
"Please check with resp_load format "
"[das_model]_[sr]_[srm]_[gain] or metadatatoph5 format "
"[das_model]_[sensor_model]_[sr][cha] "
"(check doesn't include [cha]).")
with LogCapture() as log:
log.setLevel(logging.ERROR)
validation.check_das_resp_load_format(
Response_t, info, header, errors, logger,
('incomplete', set(['smodel']))
)
self.assertEqual(len(log.records), 1)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
# response_file_das_a is blank but is not logged as an error here
# because it will return False so that ph5tostationxml throws the error
Response_t['response_file_das_a'] = ''
errors = set()
with LogCapture() as log:
validation.check_das_resp_load_format(
Response_t, info, header, errors, logger, True)
self.assertEqual(log.records, [])
self.assertEqual(errors, set([]))
def test_check_sensor(self):
errors = set()
self.ph5API_object.read_response_t()
Response_t = self.ph5API_object.get_response_t_by_n_i(4)
info = next(item for item in self.resp_check_info if item["n_i"] == 4)
info['smodel_no_special_char'] = info['smodel'].translate(None,
' ,/-=._')
header = "array 009, station 9001, channel 1: "
# n_i=4: response_sensor_file_name is 'gs11v'
with LogCapture() as log:
ret = validation.check_sensor(
Response_t, info, header, errors, logger)
self.assertIsNone(ret)
self.assertEqual(log.records, [])
self.assertEqual(errors, set([]))
# n_i=4: response_sensor_file_name isn't 'cmg3t'
info['smodel'] = info['smodel_no_special_char'] = 'cmg3t'
err = ("array 009, station 9001, channel 1: Response_t[4]:"
"response_file_sensor_a 'gs11v' is inconsistent with "
"Array_t_009:sensor_model=cmg3t.")
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_sensor(
Response_t, info, header, errors, logger)
self.assertFalse(ret)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
errors = set()
# n_i=4: response_sensor_file_name='', smodel=''
Response_t['response_file_sensor_a'] = ''
info['smodel'] = info['smodel_no_special_char'] = ''
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_sensor(
Response_t, info, header, errors, logger)
self.assertFalse(ret)
self.assertEqual(log.records, [])
self.assertEqual(errors, set([]))
errors = set()
# n_i=4: response_sensor_file_name!='', smodel=''
Response_t['response_file_sensor_a'] = \
'/Experiment_g/Responses_g/gs11v'
info['smodel'] = ''
err = ("array 009, station 9001, channel 1: Response_t[4]:"
"response_file_sensor_a 'gs11v' is inconsistent with "
"Array_t_009:sensor_model=.")
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_sensor(
Response_t, info, header, errors, logger)
self.assertFalse(ret)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
errors = set()
# n_i=4: response_sensor_file_name='', smodel!=''
Response_t['response_file_sensor_a'] = ''
info['smodel'] = 'gs11v'
err = ('array 009, station 9001, channel 1: Response_t[4]:'
'response_file_sensor_a is blank while sensor model exists.')
with LogCapture() as log:
log.setLevel(logging.ERROR)
ret = validation.check_sensor(
Response_t, info, header, errors, logger)
self.assertFalse(ret)
self.assertEqual(log.records[0].msg, err)
self.assertEqual(errors, set([(err, 'error')]))
def test_check_response_info(self):
self.ph5API_object.read_response_t()
checked_data_files = {}
errors = set()
with LogCapture() as log:
log.setLevel(logging.WARNING)
ret = validation.check_response_info(
self.resp_check_info[2], self.ph5API_object,
checked_data_files, errors, logger)
self.assertEqual(ret, ('/Experiment_g/Responses_g/rt125a_500_1_32',
'/Experiment_g/Responses_g/gs11v'))
self.assertEqual(errors, set())
self.assertEqual(log.records, [])
info = next(item for item in self.resp_check_info if item["n_i"] == 4)
info['spr'] = 100
info['smodel'] = 'cmg3t'
chckerrors = set(
["array 009 station 9001, channel 1: Response_t[4]:"
"response_file_das_a 'rt125a_500_1_32' is inconsistent with "
"Array_t_009:sr=100. Please check with resp_load format "
"[das_model]_[sr]_[srm]_[gain].",
"array 009 station 9001, channel 1: Response_t[4]:"
"response_file_sensor_a 'gs11v' is inconsistent with "
"Array_t_009:sensor_model=cmg3t."])
with LogCapture() as log:
log.setLevel(logging.WARNING)
ret = validation.check_response_info(
info, self.ph5API_object, checked_data_files, errors, logger)
self.assertEqual(ret, ('/Experiment_g/Responses_g/rt125a_500_1_32',
'/Experiment_g/Responses_g/gs11v'))
self.assertEqual(
errors,
{(errmsg, 'error') for errmsg in chckerrors})
self.assertEqual({r.msg for r in log.records},
chckerrors)
errors = set()
info = next(item for item in self.resp_check_info if item["n_i"] == 5)
info['dmodel'] = 'Q330'
info['smodel'] = 'None/CMG3T'
chckerrors = set(
["array 002 station 0407, channel 1: Response_t[5]:"
"response_file_das_a 'NoneQ330_NoneCMG3T_200HHN' is inconsistent "
"with Array_t_002:das_model=Q330. Please check with "
"metadatatoph5 format [das_model]_[sensor_model]_[sr][cha] "
"(check doesn't include [cha])."])
with LogCapture() as log:
log.setLevel(logging.WARNING)
ret = validation.check_response_info(
info, self.ph5API_object, checked_data_files, errors, logger)
self.assertEqual(
ret,
('/Experiment_g/Responses_g/NoneQ330_NoneCMG3T_200HHN', ''))
self.assertEqual(
errors,
{(errmsg, 'error') for errmsg in chckerrors})
self.assertEqual({r.msg for r in log.records},
chckerrors)
response_t = self.ph5API_object.get_response_t_by_n_i(1)
response_t['response_file_das_a'] = 'rt130_200_1_1'
info = next(item for item in self.resp_check_info if item["n_i"] == 1)
info['spr'] = 200
ret = validation.check_response_info(
info, self.ph5API_object, checked_data_files, errors, logger)
self.assertEqual(
ret,
(False, ['array 008 station 8001, channel 1: Response_t[1]:'
'No response data loaded for rt130_200_1_1.']))
info['n_i'] = 8
ret = validation.check_response_info(
info, self.ph5API_object, checked_data_files, errors, logger)
self.assertEqual(ret,
(False, ['array 008 station 8001, channel 1: '
'Response_t has no entry for n_i=8']))
def test_check_response_unique_n_i(self):
self.ph5API_object.read_response_t()
response_t = self.ph5API_object.get_response_t_by_n_i(1)
response_t['n_i'] = 2
errors = set()
validation.check_resp_unique_n_i(self.ph5API_object, errors)
self.assertEqual(
errors,
{('Response_t n_i(s) duplicated: 2. '
'Try to rerun resp_load to see if it fix the problem.',
'error')})
def test_check_has_response_filename(self):
self.ph5API_object.read_response_t()
has_response_file = validation.check_has_response_filename(
self.ph5API_object.Response_t, [], None)
self.assertTrue(has_response_file)
class TestValidation_no_response_filename(LogTestCase, TempDirTestCase):
def tearDown(self):
self.ph5.close()
super(TestValidation_no_response_filename, self).tearDown()
def test_check_has_response_filename(self):
testargs = ['segdtoph5', '-n', 'master.ph5', '-U', '13N', '-r',
os.path.join(self.home,
'ph5/test_data/segd/fairfield/3ch.fcnt')]
with patch.object(sys, 'argv', testargs):
segd2ph5.main()
self.ph5 = ph5api.PH5(path=self.tmpdir, nickname='master.ph5')
self.ph5.read_response_t()
has_response_file = validation.check_has_response_filename(
self.ph5.Response_t, set(), None)
self.assertEqual(has_response_file,
"Response table does not contain any response file "
"names. Check if resp_load has been run or if "
"metadatatoph5 input contained response information.")
class TestValidation_location(unittest.TestCase):
def test_check_lat_lon_elev(self):
station = {'location/X/value_d': 100.0,
'location/X/units_s': 'degrees',
'location/Y/value_d': 70.0,
'location/Y/units_s': 'degrees',
'location/Z/value_d': 1047,
'location/Z/units_s': 'm'}
errors, warnings = validation.check_lat_lon_elev(station)
self.assertEqual(errors, [])
self.assertEqual(warnings, [])
station = {'location/X/value_d': 190.0,
'location/X/units_s': '',
'location/Y/value_d': -100.0,
'location/Y/units_s': '',
'location/Z/value_d': 0.0,
'location/Z/units_s': ''}
errors, warnings = validation.check_lat_lon_elev(station)
self.assertEqual(
errors,
['Channel longitude 190.0 not in range [-180,180]',
'Channel latitude -100.0 not in range [-90,90]'])
self.assertEqual(
warnings,
['No Station location/X/units_s value found.',
'No Station location/Y/units_s value found.',
'Channel elevation seems to be 0. Is this correct???',
'No Station location/Z/units_s value found.'])
station = {'location/X/value_d': 0,
'location/X/units_s': '',
'location/Y/value_d': 0,
'location/Y/units_s': None,
'location/Z/value_d': 0,
'location/Z/units_s': None}
errors, warnings = validation.check_lat_lon_elev(station)
self.assertEqual(errors, [])
self.assertEqual(
warnings,
['Channel longitude seems to be 0. Is this correct???',
'No Station location/X/units_s value found.',
'Channel latitude seems to be 0. Is this correct???',
'No Station location/Y/units_s value found.',
'Channel elevation seems to be 0. Is this correct???',
'No Station location/Z/units_s value found.'])
if __name__ == "__main__":
unittest.main()
| 45.507299
| 79
| 0.573061
| 2,915
| 24,938
| 4.656261
| 0.088508
| 0.044426
| 0.030649
| 0.023576
| 0.805717
| 0.772195
| 0.755691
| 0.730494
| 0.709718
| 0.688205
| 0
| 0.046658
| 0.314179
| 24,938
| 547
| 80
| 45.590494
| 0.746945
| 0.065001
| 0
| 0.635371
| 0
| 0
| 0.263384
| 0.074719
| 0
| 0
| 0
| 0
| 0.170306
| 1
| 0.026201
| false
| 0
| 0.021834
| 0
| 0.054585
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
316112461a73ab24feeec128ab47b4e65d3e5a2d
| 397
|
py
|
Python
|
03-types/none_example.py
|
johnehunt/Python3Intro
|
2a41ce488aac11bb3928ea81e57be1c2c8acdac2
|
[
"Apache-2.0"
] | 1
|
2020-11-03T19:46:25.000Z
|
2020-11-03T19:46:25.000Z
|
03-types/main7.py
|
johnehunt/PythonIntroDS
|
7e9d5c5494191cd68bc71e140df5fb30290a8da6
|
[
"Apache-2.0"
] | null | null | null |
03-types/main7.py
|
johnehunt/PythonIntroDS
|
7e9d5c5494191cd68bc71e140df5fb30290a8da6
|
[
"Apache-2.0"
] | null | null | null |
# Set up the winner variable to hold None
winner = None
print('winner:', winner)
print('winner is None:', winner is None)
print('winner is not None:', winner is not None)
print(type(winner))
# Now set winner to be True
print('Set winner to True')
winner = True
print('winner:', winner)
print('winner is None:', winner is None)
print('winner is not None:', winner is not None)
print(type(winner))
| 26.466667
| 48
| 0.715365
| 67
| 397
| 4.238806
| 0.223881
| 0.225352
| 0.183099
| 0.211268
| 0.676056
| 0.676056
| 0.676056
| 0.676056
| 0.676056
| 0.676056
| 0
| 0
| 0.153652
| 397
| 14
| 49
| 28.357143
| 0.845238
| 0.163728
| 0
| 0.727273
| 0
| 0
| 0.303951
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.818182
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
3177af86a51cb294531df1489c0c6c98d9e506bb
| 589
|
py
|
Python
|
categories/tests/test_generating_category.py
|
Rat-Shop/RatShop
|
e3878584fe8cd865bd00a36b0b039e543aaf85aa
|
[
"MIT"
] | null | null | null |
categories/tests/test_generating_category.py
|
Rat-Shop/RatShop
|
e3878584fe8cd865bd00a36b0b039e543aaf85aa
|
[
"MIT"
] | null | null | null |
categories/tests/test_generating_category.py
|
Rat-Shop/RatShop
|
e3878584fe8cd865bd00a36b0b039e543aaf85aa
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from ..models import ShopCategory
class GenerateCategoryTest(TestCase):
def test_generating(self):
response = self.client.get('/Test/')
self.assertEqual(response.status_code, 404)
ShopCategory.objects.create(name='Test', description='Test', image='-')
response = self.client.get('/Test/')
self.assertEqual(response.status_code, 200)
ShopCategory.objects.create(name='test', description='Test', image='-')
response = self.client.get('/Test/')
self.assertEqual(response.status_code, 200)
| 39.266667
| 79
| 0.684211
| 65
| 589
| 6.138462
| 0.384615
| 0.090226
| 0.135338
| 0.157895
| 0.716792
| 0.716792
| 0.716792
| 0.716792
| 0.716792
| 0.716792
| 0
| 0.018519
| 0.174873
| 589
| 14
| 80
| 42.071429
| 0.802469
| 0
| 0
| 0.416667
| 1
| 0
| 0.061121
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
319eadb166e54acf07794df94d46fdb725e5ecbc
| 37,551
|
py
|
Python
|
tests/test_sns/test_publishing_boto3.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_sns/test_publishing_boto3.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | 1
|
2022-03-07T07:39:03.000Z
|
2022-03-07T07:39:03.000Z
|
tests/test_sns/test_publishing_boto3.py
|
symroe/moto
|
4e106995af6f2820273528fca8a4e9ee288690a5
|
[
"Apache-2.0"
] | null | null | null |
import base64
import json
import boto3
import re
from freezegun import freeze_time
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
import pytest
from moto import mock_sns, mock_sqs, settings
from moto.core import ACCOUNT_ID
from moto.core.models import responses_mock
from moto.sns import sns_backends
MESSAGE_FROM_SQS_TEMPLATE = (
'{\n "Message": "%s",\n "MessageId": "%s",\n "Signature": "EXAMPLElDMXvB8r9R83tGoNn0ecwd5UjllzsvSvbItzfaMpN2nk5HVSw7XnOn/49IkxDKz8YrlH2qJXj2iZB0Zo2O71c4qQk1fMUDi3LGpij7RCW7AW9vYYsSqIKRnFS94ilu7NFhUzLiieYr4BKHpdTmdD6c0esKEYBpabxDSc=",\n "SignatureVersion": "1",\n "SigningCertURL": "https://sns.us-east-1.amazonaws.com/SimpleNotificationService-f3ecfb7224c7233fe7bb5f59f96de52f.pem",\n "Subject": "my subject",\n "Timestamp": "2015-01-01T12:00:00.000Z",\n "TopicArn": "arn:aws:sns:%s:'
+ ACCOUNT_ID
+ ':some-topic",\n "Type": "Notification",\n "UnsubscribeURL": "https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-1:'
+ ACCOUNT_ID
+ ':some-topic:2bcfbf39-05c3-41de-beaa-fcfcc21c8f55"\n}'
)
@mock_sqs
@mock_sns
def test_publish_to_sqs():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-east-1")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID),
)
message = "my message"
with freeze_time("2015-01-01 12:00:00"):
published_message = conn.publish(
TopicArn=topic_arn, Message=message, Subject="my subject"
)
published_message_id = published_message["MessageId"]
queue = sqs_conn.get_queue_by_name(QueueName="test-queue")
with freeze_time("2015-01-01 12:00:01"):
messages = queue.receive_messages(MaxNumberOfMessages=1)
expected = MESSAGE_FROM_SQS_TEMPLATE % (message, published_message_id, "us-east-1")
acquired_message = re.sub(
r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",
"2015-01-01T12:00:00.000Z",
messages[0].body,
)
acquired_message.should.equal(expected)
@mock_sqs
@mock_sns
def test_publish_to_sqs_raw():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="some-topic")
sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(QueueName="test-queue")
subscription = topic.subscribe(
Protocol="sqs", Endpoint=queue.attributes["QueueArn"]
)
subscription.set_attributes(
AttributeName="RawMessageDelivery", AttributeValue="true"
)
message = "my message"
with freeze_time("2015-01-01 12:00:00"):
topic.publish(Message=message)
with freeze_time("2015-01-01 12:00:01"):
messages = queue.receive_messages(MaxNumberOfMessages=1)
messages[0].body.should.equal(message)
@mock_sns
@mock_sqs
def test_publish_to_sqs_fifo():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(
Name="topic.fifo",
Attributes={"FifoTopic": "true", "ContentBasedDeduplication": "true"},
)
sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(
QueueName="queue.fifo",
Attributes={"FifoQueue": "true", "ContentBasedDeduplication": "true"},
)
topic.subscribe(Protocol="sqs", Endpoint=queue.attributes["QueueArn"])
topic.publish(Message="message", MessageGroupId="message_group_id")
@mock_sqs
@mock_sns
def test_publish_to_sqs_bad():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-east-1")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID),
)
message = "my message"
try:
# Test missing Value
conn.publish(
TopicArn=topic_arn,
Message=message,
MessageAttributes={"store": {"DataType": "String"}},
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
try:
# Test empty DataType (if the DataType field is missing entirely
# botocore throws an exception during validation)
conn.publish(
TopicArn=topic_arn,
Message=message,
MessageAttributes={
"store": {"DataType": "", "StringValue": "example_corp"}
},
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
try:
# Test empty Value
conn.publish(
TopicArn=topic_arn,
Message=message,
MessageAttributes={"store": {"DataType": "String", "StringValue": ""}},
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
try:
# Test Number DataType, with a non numeric value
conn.publish(
TopicArn=topic_arn,
Message=message,
MessageAttributes={"price": {"DataType": "Number", "StringValue": "error"}},
)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameterValue")
err.response["Error"]["Message"].should.equal(
"An error occurred (ParameterValueInvalid) when calling the Publish operation: Could not cast message attribute 'price' value to number."
)
@mock_sqs
@mock_sns
def test_publish_to_sqs_msg_attr_byte_value():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn, Protocol="sqs", Endpoint=queue.attributes["QueueArn"]
)
queue_raw = sqs.create_queue(QueueName="test-queue-raw")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint=queue_raw.attributes["QueueArn"],
Attributes={"RawMessageDelivery": "true"},
)
conn.publish(
TopicArn=topic_arn,
Message="my message",
MessageAttributes={
"store": {"DataType": "Binary", "BinaryValue": b"\x02\x03\x04"}
},
)
message = json.loads(queue.receive_messages()[0].body)
message["Message"].should.equal("my message")
message["MessageAttributes"].should.equal(
{
"store": {
"Type": "Binary",
"Value": base64.b64encode(b"\x02\x03\x04").decode(),
}
}
)
message = queue_raw.receive_messages()[0]
message.body.should.equal("my message")
@mock_sqs
@mock_sns
def test_publish_to_sqs_msg_attr_number_type():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="test-topic")
sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(QueueName="test-queue")
topic.subscribe(Protocol="sqs", Endpoint=queue.attributes["QueueArn"])
queue_raw = sqs.create_queue(QueueName="test-queue-raw")
topic.subscribe(
Protocol="sqs",
Endpoint=queue_raw.attributes["QueueArn"],
Attributes={"RawMessageDelivery": "true"},
)
topic.publish(
Message="test message",
MessageAttributes={"retries": {"DataType": "Number", "StringValue": "0"}},
)
message = json.loads(queue.receive_messages()[0].body)
message["Message"].should.equal("test message")
message["MessageAttributes"].should.equal(
{"retries": {"Type": "Number", "Value": 0}}
)
message = queue_raw.receive_messages()[0]
message.body.should.equal("test message")
@mock_sqs
@mock_sns
def test_publish_to_sqs_msg_attr_different_formats():
"""
Verify different Number-formats are processed correctly
"""
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="test-topic")
sqs = boto3.resource("sqs", region_name="us-east-1")
sqs_client = boto3.client("sqs", region_name="us-east-1")
queue_raw = sqs.create_queue(QueueName="test-queue-raw")
topic.subscribe(
Protocol="sqs",
Endpoint=queue_raw.attributes["QueueArn"],
Attributes={"RawMessageDelivery": "true"},
)
topic.publish(
Message="test message",
MessageAttributes={
"integer": {"DataType": "Number", "StringValue": "123"},
"float": {"DataType": "Number", "StringValue": "12.34"},
"big-integer": {"DataType": "Number", "StringValue": "123456789"},
"big-float": {"DataType": "Number", "StringValue": "123456.789"},
},
)
messages_resp = sqs_client.receive_message(
QueueUrl=queue_raw.url, MessageAttributeNames=["All"]
)
message = messages_resp["Messages"][0]
message_attributes = message["MessageAttributes"]
message_attributes.should.equal(
{
"integer": {"DataType": "Number", "StringValue": "123"},
"float": {"DataType": "Number", "StringValue": "12.34"},
"big-integer": {"DataType": "Number", "StringValue": "123456789"},
"big-float": {"DataType": "Number", "StringValue": "123456.789"},
}
)
@mock_sns
def test_publish_sms():
client = boto3.client("sns", region_name="us-east-1")
result = client.publish(PhoneNumber="+15551234567", Message="my message")
result.should.contain("MessageId")
if not settings.TEST_SERVER_MODE:
sns_backend = sns_backends["us-east-1"]
sns_backend.sms_messages.should.have.key(result["MessageId"]).being.equal(
("+15551234567", "my message")
)
@mock_sns
def test_publish_bad_sms():
client = boto3.client("sns", region_name="us-east-1")
# Test invalid number
with pytest.raises(ClientError) as cm:
client.publish(PhoneNumber="NAA+15551234567", Message="my message")
cm.value.response["Error"]["Code"].should.equal("InvalidParameter")
cm.value.response["Error"]["Message"].should.contain("not meet the E164")
# Test too-long ASCII message
with pytest.raises(ClientError) as cm:
client.publish(PhoneNumber="+15551234567", Message="a" * 1601)
cm.value.response["Error"]["Code"].should.equal("InvalidParameter")
cm.value.response["Error"]["Message"].should.contain("must be less than 1600")
@mock_sqs
@mock_sns
def test_publish_to_sqs_dump_json():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-east-1")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID),
)
message = json.dumps(
{
"Records": [
{
"eventVersion": "2.0",
"eventSource": "aws:s3",
"s3": {"s3SchemaVersion": "1.0"},
}
]
},
sort_keys=True,
)
with freeze_time("2015-01-01 12:00:00"):
published_message = conn.publish(
TopicArn=topic_arn, Message=message, Subject="my subject"
)
published_message_id = published_message["MessageId"]
queue = sqs_conn.get_queue_by_name(QueueName="test-queue")
with freeze_time("2015-01-01 12:00:01"):
messages = queue.receive_messages(MaxNumberOfMessages=1)
escaped = message.replace('"', '\\"')
expected = MESSAGE_FROM_SQS_TEMPLATE % (escaped, published_message_id, "us-east-1")
acquired_message = re.sub(
r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",
"2015-01-01T12:00:00.000Z",
messages[0].body,
)
acquired_message.should.equal(expected)
@mock_sqs
@mock_sns
def test_publish_to_sqs_in_different_region():
conn = boto3.client("sns", region_name="us-west-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-west-2")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-west-2:{}:test-queue".format(ACCOUNT_ID),
)
message = "my message"
with freeze_time("2015-01-01 12:00:00"):
published_message = conn.publish(
TopicArn=topic_arn, Message=message, Subject="my subject"
)
published_message_id = published_message["MessageId"]
queue = sqs_conn.get_queue_by_name(QueueName="test-queue")
with freeze_time("2015-01-01 12:00:01"):
messages = queue.receive_messages(MaxNumberOfMessages=1)
expected = MESSAGE_FROM_SQS_TEMPLATE % (message, published_message_id, "us-west-1")
acquired_message = re.sub(
r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",
"2015-01-01T12:00:00.000Z",
messages[0].body,
)
acquired_message.should.equal(expected)
@freeze_time("2013-01-01")
@mock_sns
def test_publish_to_http():
def callback(request):
request.headers["Content-Type"].should.equal("text/plain; charset=UTF-8")
json.loads.when.called_with(request.body.decode()).should_not.throw(Exception)
return 200, {}, ""
responses_mock.add_callback(
method="POST", url="http://example.com/foobar", callback=callback
)
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
conn.subscribe(
TopicArn=topic_arn, Protocol="http", Endpoint="http://example.com/foobar"
)
conn.publish(TopicArn=topic_arn, Message="my message", Subject="my subject")
if not settings.TEST_SERVER_MODE:
sns_backend = sns_backends["us-east-1"]
sns_backend.topics[topic_arn].sent_notifications.should.have.length_of(1)
notification = sns_backend.topics[topic_arn].sent_notifications[0]
_, msg, subject, _, _ = notification
msg.should.equal("my message")
subject.should.equal("my subject")
@mock_sqs
@mock_sns
def test_publish_subject():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-east-1")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID),
)
message = "my message"
subject1 = "test subject"
subject2 = "test subject" * 20
with freeze_time("2015-01-01 12:00:00"):
conn.publish(TopicArn=topic_arn, Message=message, Subject=subject1)
# Just that it doesn't error is a pass
try:
with freeze_time("2015-01-01 12:00:00"):
conn.publish(TopicArn=topic_arn, Message=message, Subject=subject2)
except ClientError as err:
err.response["Error"]["Code"].should.equal("InvalidParameter")
else:
raise RuntimeError("Should have raised an InvalidParameter exception")
@mock_sqs
@mock_sns
def test_publish_null_subject():
conn = boto3.client("sns", region_name="us-east-1")
conn.create_topic(Name="some-topic")
response = conn.list_topics()
topic_arn = response["Topics"][0]["TopicArn"]
sqs_conn = boto3.resource("sqs", region_name="us-east-1")
sqs_conn.create_queue(QueueName="test-queue")
conn.subscribe(
TopicArn=topic_arn,
Protocol="sqs",
Endpoint="arn:aws:sqs:us-east-1:{}:test-queue".format(ACCOUNT_ID),
)
message = "my message"
with freeze_time("2015-01-01 12:00:00"):
conn.publish(TopicArn=topic_arn, Message=message)
queue = sqs_conn.get_queue_by_name(QueueName="test-queue")
with freeze_time("2015-01-01 12:00:01"):
messages = queue.receive_messages(MaxNumberOfMessages=1)
acquired_message = json.loads(messages[0].body)
acquired_message["Message"].should.equal(message)
acquired_message.shouldnt.have.key("Subject")
@mock_sns
def test_publish_message_too_long():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="some-topic")
with pytest.raises(ClientError):
topic.publish(Message="".join(["." for i in range(0, 262145)]))
# message short enough - does not raise an error
topic.publish(Message="".join(["." for i in range(0, 262144)]))
@mock_sns
def test_publish_fifo_needs_group_id():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(
Name="topic.fifo",
Attributes={"FifoTopic": "true", "ContentBasedDeduplication": "true"},
)
with pytest.raises(
ClientError, match="The request must contain the parameter MessageGroupId"
):
topic.publish(Message="message")
# message group included - OK
topic.publish(Message="message", MessageGroupId="message_group_id")
@mock_sns
@mock_sqs
def test_publish_group_id_to_non_fifo():
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="topic")
with pytest.raises(
ClientError,
match="The request include parameter that is not valid for this queue type",
):
topic.publish(Message="message", MessageGroupId="message_group_id")
# message group not included - OK
topic.publish(Message="message")
def _setup_filter_policy_test(filter_policy):
sns = boto3.resource("sns", region_name="us-east-1")
topic = sns.create_topic(Name="some-topic")
sqs = boto3.resource("sqs", region_name="us-east-1")
queue = sqs.create_queue(QueueName="test-queue")
subscription = topic.subscribe(
Protocol="sqs", Endpoint=queue.attributes["QueueArn"]
)
subscription.set_attributes(
AttributeName="FilterPolicy", AttributeValue=json.dumps(filter_policy)
)
return topic, queue
@mock_sqs
@mock_sns
def test_filtering_exact_string():
topic, queue = _setup_filter_policy_test({"store": ["example_corp"]})
topic.publish(
Message="match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"store": {"Type": "String", "Value": "example_corp"}}]
)
@mock_sqs
@mock_sns
def test_filtering_exact_string_multiple_message_attributes():
topic, queue = _setup_filter_policy_test({"store": ["example_corp"]})
topic.publish(
Message="match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"},
"event": {"DataType": "String", "StringValue": "order_cancelled"},
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[
{
"store": {"Type": "String", "Value": "example_corp"},
"event": {"Type": "String", "Value": "order_cancelled"},
}
]
)
@mock_sqs
@mock_sns
def test_filtering_exact_string_OR_matching():
topic, queue = _setup_filter_policy_test(
{"store": ["example_corp", "different_corp"]}
)
topic.publish(
Message="match example_corp",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"}
},
)
topic.publish(
Message="match different_corp",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "different_corp"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match example_corp", "match different_corp"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[
{"store": {"Type": "String", "Value": "example_corp"}},
{"store": {"Type": "String", "Value": "different_corp"}},
]
)
@mock_sqs
@mock_sns
def test_filtering_exact_string_AND_matching_positive():
topic, queue = _setup_filter_policy_test(
{"store": ["example_corp"], "event": ["order_cancelled"]}
)
topic.publish(
Message="match example_corp order_cancelled",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"},
"event": {"DataType": "String", "StringValue": "order_cancelled"},
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match example_corp order_cancelled"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[
{
"store": {"Type": "String", "Value": "example_corp"},
"event": {"Type": "String", "Value": "order_cancelled"},
}
]
)
@mock_sqs
@mock_sns
def test_filtering_exact_string_AND_matching_no_match():
topic, queue = _setup_filter_policy_test(
{"store": ["example_corp"], "event": ["order_cancelled"]}
)
topic.publish(
Message="match example_corp order_accepted",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"},
"event": {"DataType": "String", "StringValue": "order_accepted"},
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_exact_string_no_match():
topic, queue = _setup_filter_policy_test({"store": ["example_corp"]})
topic.publish(
Message="no match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "different_corp"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_exact_string_no_attributes_no_match():
topic, queue = _setup_filter_policy_test({"store": ["example_corp"]})
topic.publish(Message="no match")
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_exact_number_int():
topic, queue = _setup_filter_policy_test({"price": [100]})
topic.publish(
Message="match",
MessageAttributes={"price": {"DataType": "Number", "StringValue": "100"}},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([{"price": {"Type": "Number", "Value": 100}}])
@mock_sqs
@mock_sns
def test_filtering_exact_number_float():
topic, queue = _setup_filter_policy_test({"price": [100.1]})
topic.publish(
Message="match",
MessageAttributes={"price": {"DataType": "Number", "StringValue": "100.1"}},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([{"price": {"Type": "Number", "Value": 100.1}}])
@mock_sqs
@mock_sns
def test_filtering_exact_number_float_accuracy():
topic, queue = _setup_filter_policy_test({"price": [100.123456789]})
topic.publish(
Message="match",
MessageAttributes={
"price": {"DataType": "Number", "StringValue": "100.1234561"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"price": {"Type": "Number", "Value": 100.1234561}}]
)
@mock_sqs
@mock_sns
def test_filtering_exact_number_no_match():
topic, queue = _setup_filter_policy_test({"price": [100]})
topic.publish(
Message="no match",
MessageAttributes={"price": {"DataType": "Number", "StringValue": "101"}},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_exact_number_with_string_no_match():
topic, queue = _setup_filter_policy_test({"price": [100]})
topic.publish(
Message="no match",
MessageAttributes={"price": {"DataType": "String", "StringValue": "100"}},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_string_array_match():
topic, queue = _setup_filter_policy_test(
{"customer_interests": ["basketball", "baseball"]}
)
topic.publish(
Message="match",
MessageAttributes={
"customer_interests": {
"DataType": "String.Array",
"StringValue": json.dumps(["basketball", "rugby"]),
}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[
{
"customer_interests": {
"Type": "String.Array",
"Value": json.dumps(["basketball", "rugby"]),
}
}
]
)
@mock_sqs
@mock_sns
def test_filtering_string_array_no_match():
topic, queue = _setup_filter_policy_test({"customer_interests": ["baseball"]})
topic.publish(
Message="no_match",
MessageAttributes={
"customer_interests": {
"DataType": "String.Array",
"StringValue": json.dumps(["basketball", "rugby"]),
}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_string_array_with_number_match():
topic, queue = _setup_filter_policy_test({"price": [100, 500]})
topic.publish(
Message="match",
MessageAttributes={
"price": {"DataType": "String.Array", "StringValue": json.dumps([100, 50])}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"price": {"Type": "String.Array", "Value": json.dumps([100, 50])}}]
)
@mock_sqs
@mock_sns
def test_filtering_string_array_with_number_float_accuracy_match():
topic, queue = _setup_filter_policy_test({"price": [100.123456789, 500]})
topic.publish(
Message="match",
MessageAttributes={
"price": {
"DataType": "String.Array",
"StringValue": json.dumps([100.1234561, 50]),
}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"price": {"Type": "String.Array", "Value": json.dumps([100.1234561, 50])}}]
)
@mock_sqs
@mock_sns
# this is the correct behavior from SNS
def test_filtering_string_array_with_number_no_array_match():
topic, queue = _setup_filter_policy_test({"price": [100, 500]})
topic.publish(
Message="match",
MessageAttributes={"price": {"DataType": "String.Array", "StringValue": "100"}},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"price": {"Type": "String.Array", "Value": "100"}}]
)
@mock_sqs
@mock_sns
def test_filtering_string_array_with_number_no_match():
topic, queue = _setup_filter_policy_test({"price": [500]})
topic.publish(
Message="no_match",
MessageAttributes={
"price": {"DataType": "String.Array", "StringValue": json.dumps([100, 50])}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
# this is the correct behavior from SNS
def test_filtering_string_array_with_string_no_array_no_match():
topic, queue = _setup_filter_policy_test({"price": [100]})
topic.publish(
Message="no_match",
MessageAttributes={
"price": {"DataType": "String.Array", "StringValue": "one hundred"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_attribute_key_exists_match():
topic, queue = _setup_filter_policy_test({"store": [{"exists": True}]})
topic.publish(
Message="match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"store": {"Type": "String", "Value": "example_corp"}}]
)
@mock_sqs
@mock_sns
def test_filtering_attribute_key_exists_no_match():
topic, queue = _setup_filter_policy_test({"store": [{"exists": True}]})
topic.publish(
Message="no match",
MessageAttributes={
"event": {"DataType": "String", "StringValue": "order_cancelled"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_attribute_key_not_exists_match():
topic, queue = _setup_filter_policy_test({"store": [{"exists": False}]})
topic.publish(
Message="match",
MessageAttributes={
"event": {"DataType": "String", "StringValue": "order_cancelled"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[{"event": {"Type": "String", "Value": "order_cancelled"}}]
)
@mock_sqs
@mock_sns
def test_filtering_attribute_key_not_exists_no_match():
topic, queue = _setup_filter_policy_test({"store": [{"exists": False}]})
topic.publish(
Message="no match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"}
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
@mock_sqs
@mock_sns
def test_filtering_all_AND_matching_match():
topic, queue = _setup_filter_policy_test(
{
"store": [{"exists": True}],
"event": ["order_cancelled"],
"customer_interests": ["basketball", "baseball"],
"price": [100],
}
)
topic.publish(
Message="match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"},
"event": {"DataType": "String", "StringValue": "order_cancelled"},
"customer_interests": {
"DataType": "String.Array",
"StringValue": json.dumps(["basketball", "rugby"]),
},
"price": {"DataType": "Number", "StringValue": "100"},
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal(["match"])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal(
[
{
"store": {"Type": "String", "Value": "example_corp"},
"event": {"Type": "String", "Value": "order_cancelled"},
"customer_interests": {
"Type": "String.Array",
"Value": json.dumps(["basketball", "rugby"]),
},
"price": {"Type": "Number", "Value": 100},
}
]
)
@mock_sqs
@mock_sns
def test_filtering_all_AND_matching_no_match():
topic, queue = _setup_filter_policy_test(
{
"store": [{"exists": True}],
"event": ["order_cancelled"],
"customer_interests": ["basketball", "baseball"],
"price": [100],
"encrypted": [False],
}
)
topic.publish(
Message="no match",
MessageAttributes={
"store": {"DataType": "String", "StringValue": "example_corp"},
"event": {"DataType": "String", "StringValue": "order_cancelled"},
"customer_interests": {
"DataType": "String.Array",
"StringValue": json.dumps(["basketball", "rugby"]),
},
"price": {"DataType": "Number", "StringValue": "100"},
},
)
messages = queue.receive_messages(MaxNumberOfMessages=5)
message_bodies = [json.loads(m.body)["Message"] for m in messages]
message_bodies.should.equal([])
message_attributes = [json.loads(m.body)["MessageAttributes"] for m in messages]
message_attributes.should.equal([])
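# A hypothetical consolidation of the receive/assert pattern repeated in the
# tests above (not part of the original suite); it relies only on the boto3
# resource calls and sure assertions already used in this module.
def _assert_delivered_bodies(queue, expected_bodies):
    messages = queue.receive_messages(MaxNumberOfMessages=5)
    bodies = [json.loads(m.body)["Message"] for m in messages]
    bodies.should.equal(expected_bodies)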
| 33.527679
| 496
| 0.644963
| 4,282
| 37,551
| 5.458431
| 0.078234
| 0.034356
| 0.021392
| 0.029949
| 0.860095
| 0.841997
| 0.831558
| 0.813332
| 0.802464
| 0.764472
| 0
| 0.025674
| 0.204415
| 37,551
| 1,119
| 497
| 33.557641
| 0.756686
| 0.0147
| 0
| 0.644664
| 0
| 0.006601
| 0.215126
| 0.023019
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048405
| false
| 0
| 0.013201
| 0
| 0.063806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
31da1f1909ef53113c7a7734a4998f1213e36106
| 11,601
|
py
|
Python
|
featurizer/functions/split.py
|
zmm18/featurizer
|
bb30ab8656af4bdd85b20bb85b481aeaef60c528
|
[
"Apache-2.0"
] | null | null | null |
featurizer/functions/split.py
|
zmm18/featurizer
|
bb30ab8656af4bdd85b20bb85b481aeaef60c528
|
[
"Apache-2.0"
] | null | null | null |
featurizer/functions/split.py
|
zmm18/featurizer
|
bb30ab8656af4bdd85b20bb85b481aeaef60c528
|
[
"Apache-2.0"
] | 1
|
2022-03-26T10:35:24.000Z
|
2022-03-26T10:35:24.000Z
|
# -*- coding: utf-8 -*-
# Copyright StateOfTheArt.quant.
#
# * Commercial Usage: please contact allen.across@gmail.com
# * Non-Commercial Usage:
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import pandas as pd
import numpy as np
from functools import reduce
import pdb
def split(tensor, window=5, step=1, offset=0, keep_tail=True):
"""
:param tensor: numpy data
:param window: int, size of window default=5
:param step: int, size between two windows default=1
:param offset: int, first window offset default=0
:param keep_tail: Boolean, {True: keep the tail of the data; False: the tail of the data may not be kept}, default True
:return: list of numpy arrays
Examples::
>>> data = np.array([1,2,3,4,5,6,7,8,9,10])
>>> # keep_tail is True
>>> split_list = split(data, window=4, step=5, offset=0, keep_tail=True)
>>> split_list # [array([1]), array([2, 3, 4, 5]), array([ 7, 8, 9, 10])]
>>> # keep_tail is False
>>> split_list = split(data, window=4, step=5, offset=0, keep_tail=False)
>>> split_list # [array([1, 2, 3, 4]), array([6, 7, 8, 9]), array([10])]
"""
window, step, offset = int(window), int(step), int(offset)
sample_list = []
index = int((len(tensor) - window - offset) / step) + 1 #total steps
remain = int(len(tensor) - window - offset - (index - 1) * step)
#print('remain : ', remain)
if keep_tail:
start_index = remain + offset
if remain > 0:
sample_list.append(tensor[offset:offset+remain])
for i in range(index):
window_data = tensor[start_index + i * step : start_index + window + i * step]
sample_list.append(window_data)
else:
start_index = offset
for i in range(index):
window_data = tensor[start_index + i * step : start_index + window + i * step]
sample_list.append(window_data)
if remain > 0:
sample_list.append(tensor[-remain:])
return sample_list
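# Worked example of the arithmetic above (not in the original module), using the
# docstring data (len=10, window=4, step=5, offset=0):
#   index  = int((10 - 4 - 0) / 5) + 1 = 2   -> two full windows
#   remain = 10 - 4 - 0 - (2 - 1) * 5 = 1    -> one leftover element
# With keep_tail=True the leftover is emitted first:  [1], [2, 3, 4, 5], [7, 8, 9, 10]
# With keep_tail=False it is emitted last:            [1, 2, 3, 4], [6, 7, 8, 9], [10]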
def split3d(tensor, window=5, step=1, offset=0, keep_tail=True, dim=1):
"""
:param tensor: numpy data
:param window: int, size of window default=5
:param step: int, size between two windows default=1
:param offset: int, first window offset default=0
:param keep_tail: Boolean, {True: keep the tail of the data; False: the tail of the data may not be kept}, default True
:return: list of numpy arrays
Examples::
>>> data = np.array([1,2,3,4,5,6,7,8,9,10])
>>> # keep_tail is True
>>> split_list = split(data, window=4, step=5, offset=0, keep_tail=True)
>>> split_list # [array([1]), array([2, 3, 4, 5]), array([ 7, 8, 9, 10])]
>>> # keep_tail is False
>>> split_list = split(data, window=4, step=5, offset=0, keep_tail=False)
>>> split_list # [array([1, 2, 3, 4]), array([6, 7, 8, 9]), array([10])]
"""
window, step, offset = int(window), int(step), int(offset)
sample_list = []
length = tensor.shape[dim]
index = int((length - window - offset) / step) + 1  # total steps
remain = int(length - window - offset - (index - 1) * step)
#print('remain : ', remain)
if keep_tail:
start_index = remain + offset
if remain > 0:
sample_list.append(tensor[:,offset:offset+remain])
for i in range(index):
window_data = tensor[:,start_index + i * step : start_index + window + i * step]
sample_list.append(window_data)
else:
start_index = offset
for i in range(index):
window_data = tensor[:,start_index + i * step : start_index + window + i * step]
sample_list.append(window_data)
if remain > 0:
sample_list.append(tensor[:,-remain:])
return sample_list
def split_sample(tensor, window=5, step=1, offset=0, keep_tail=True, merge_remain=False):
"""
:param tensor: numpy data
:param window: int, size of window default=5
:param step: int, size between two windows default=1
:param offset: int, first window offset default=0
:param keep_tail: Boolean, {True: keep the tail of the data; False: the tail of the data may not be kept}, default True
:param merge_remain: Boolean, {True: if keep_tail is True the first sample absorbs the remaining elements,
otherwise the last sample absorbs the remaining elements.
False: the samples are determined solely by the value of keep_tail}
:return: list of numpy arrays
Examples::
>>> # use to split data set
>>> import numpy as np
>>> data = np.array(range(1, 11))
>>> window_train = 5
>>> window_test = 3
>>> # keep_tail=False, merge_remain=False
>>> train_data = split_sample(data, window=window_train, step=window_test, offset=0, keep_tail=False, merge_remain=False)
>>> train_data
[array([1, 2, 3, 4, 5]), array([4, 5, 6, 7, 8])]
>>> test_data = split_sample(data, window=window_test, step=window_test, offset=window_train, keep_tail=False, merge_remain=True)
[array([ 6, 7, 8, 9, 10])]
>>> # use to split sample
>>> data = np.array(range(30)).reshape(6, 5)
>>> # keep_tail=True, merge_remain=False
>>> sample1 = split_sample(data, window=3, step=2, offset=0, keep_tail=True, merge_remain=False)
>>> sample1
[array([[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]]),
array([[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]])]
>>> # keep_tail=False, merge_remain=False
>>> sample2 = split_sample(data, window=3, step=2, offset=0, keep_tail=False, merge_remain=False)
>>> sample2
[array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14]]),
array([[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24]])]
"""
index = int((len(tensor) - window - offset) / step) + 1
remain = len(tensor) - window - offset - (index - 1) * step
sample_list = split(tensor, window=window, step=step, offset=offset, keep_tail=keep_tail)
if remain:
if keep_tail:
idx = 1
else:
idx = -1
if not merge_remain:
return sample_list[idx:] if idx==1 else sample_list[:idx]
else:
if isinstance(tensor, torch.Tensor):
cat_func = torch.cat
else:
cat_func = np.concatenate
sample_list[idx-1] = cat_func([sample_list[idx-1], sample_list[idx]])
del sample_list[idx]
return sample_list
else:
return sample_list
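# Trace of the merge_remain branch above (not in the original module), using the
# docstring's test_data call: split_sample(np.array(range(1, 11)), window=3,
# step=3, offset=5, keep_tail=False, merge_remain=True):
#   split() yields [array([6, 7, 8]), array([9, 10])] with remain == 2,
#   keep_tail=False gives idx = -1, so the last full window is concatenated
#   with the leftover piece, producing [array([6, 7, 8, 9, 10])].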
def split_sample3d(tensor, window=5, step=1, offset=0, keep_tail=True, merge_remain=False, dim=1):
"""
:param tensor: numpy data
:param window: int, size of window default=5
:param step: int, size between two windows default=1
:param offset: int, first window offset default=0
:param keep_tail: Boolean, {True: keep the tail of the data; False: the tail of the data may not be kept}, default True
:param merge_remain: Boolean, {True: if keep_tail is True the first sample absorbs the remaining elements,
otherwise the last sample absorbs the remaining elements.
False: the samples are determined solely by the value of keep_tail}
:return: list of numpy arrays
Examples::
>>> # use to split data set
>>> import numpy as np
>>> data = np.array(range(1, 11))
>>> window_train = 5
>>> window_test = 3
>>> # keep_tail=False, merge_remain=False
>>> train_data = split_sample(data, window=window_train, step=window_test, offset=0, keep_tail=False, merge_remain=False)
>>> train_data
[array([1, 2, 3, 4, 5]), array([4, 5, 6, 7, 8])]
>>> test_data = split_sample(data, window=window_test, step=window_test, offset=window_train, keep_tail=False, merge_remain=True)
[array([ 6, 7, 8, 9, 10])]
>>> # use to split sample
>>> data = np.array(range(30)).reshape(6, 5)
>>> # keep_tail=True, merge_remain=False
>>> sample1 = split_sample(data, window=3, step=2, offset=0, keep_tail=True, merge_remain=False)
>>> sample1
[array([[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19]]),
array([[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24],
[25, 26, 27, 28, 29]])]
>>> # keep_tail=False, merge_remain=False
>>> sample2 = split_sample(data, window=3, step=2, offset=0, keep_tail=False, merge_remain=False)
>>> sample2
[array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14]]),
array([[10, 11, 12, 13, 14],
[15, 16, 17, 18, 19],
[20, 21, 22, 23, 24]])]
"""
length = tensor.shape[dim]
index = int((length - window - offset) / step) + 1
remain = length - window - offset - (index - 1) * step
sample_list = split3d(tensor, window=window, step=step, offset=offset, keep_tail=keep_tail)
if remain:
if keep_tail:
idx = 1
else:
idx = -1
if not merge_remain:
return sample_list[idx:] if idx==1 else sample_list[:idx]
else:
#pdb.set_trace()
if isinstance(tensor, torch.Tensor):
cat_func = torch.cat
else:
cat_func = np.concatenate
sample_list[idx-1] = cat_func([sample_list[idx-1], sample_list[idx]],dim)
del sample_list[idx]
return sample_list
else:
return sample_list
if __name__ == '__main__':
np.random.seed(520)
data2d_np = np.random.randn(10,3)
data2d_ts = torch.tensor(data2d_np, dtype=torch.float32)
data_list_by_split_np = split(data2d_np)
data_list_by_split_ts = split(data2d_ts)
data3d_np = np.random.randint(1,5,(2,10,3))
data3d_ts = torch.tensor(data3d_np, dtype=torch.int32)
data_list_by_split3d_np = split3d(data3d_np)
data_list_by_split3d_ts = split3d(data3d_ts)
#
data_sample_list_np = split_sample(data2d_np, window=3)
data_sample_list_ts = split_sample(data2d_ts, window=3)
data_sample3d_list_np = split_sample3d(data3d_np, window=3, step=2, merge_remain=False)
data_sample3d_list_ts = split_sample3d(data3d_ts, window=3, step=2, merge_remain=False)
data_sample3d_list_np2 = split_sample3d(data3d_np, window=3, step=2, merge_remain=True)
data_sample3d_list_ts2 = split_sample3d(data3d_ts, window=3, step=2, merge_remain=True)
| 42.339416
| 137
| 0.578226
| 1,599
| 11,601
| 4.053158
| 0.1207
| 0.054313
| 0.0395
| 0.032402
| 0.831816
| 0.827959
| 0.826107
| 0.81222
| 0.795865
| 0.795865
| 0
| 0.057227
| 0.295061
| 11,601
| 273
| 138
| 42.494505
| 0.735265
| 0.546591
| 0
| 0.628571
| 0
| 0
| 0.001695
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038095
| false
| 0
| 0.047619
| 0
| 0.161905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
31ddac78a4dbc91b41bde239623d97b84f8bcdaa
| 15,871
|
py
|
Python
|
pollination_ppl_fed.py
|
richpsharp/raster_calculations
|
28b18c34f49c2c275c46e332d7021a27703053cd
|
[
"Apache-2.0"
] | 2
|
2020-01-07T23:46:05.000Z
|
2021-05-26T00:31:31.000Z
|
pollination_ppl_fed.py
|
richpsharp/raster_calculations
|
28b18c34f49c2c275c46e332d7021a27703053cd
|
[
"Apache-2.0"
] | null | null | null |
pollination_ppl_fed.py
|
richpsharp/raster_calculations
|
28b18c34f49c2c275c46e332d7021a27703053cd
|
[
"Apache-2.0"
] | 3
|
2020-09-06T21:05:19.000Z
|
2022-01-28T22:08:35.000Z
|
"""These calculations are for the science paper."""
import sys
import os
import logging
import multiprocessing
import raster_calculations_core
from osgeo import gdal
import taskgraph
WORKSPACE_DIR = 'raster_expression_workspace'
NCPUS = multiprocessing.cpu_count()
try:
os.makedirs(WORKSPACE_DIR)
except OSError:
pass
logging.basicConfig(
level=logging.DEBUG,
format=(
'%(asctime)s (%(relativeCreated)d) %(levelname)s %(name)s'
' [%(funcName)s:%(lineno)d] %(message)s'),
stream=sys.stdout)
LOGGER = logging.getLogger(__name__)
gdal.SetCacheMax(2**30)
def main():
"""Write your expression here."""
#coarse average of pollination dependence
# calculation_list = [
# {
# 'expression': 'polldep/total',
# 'symbol_to_path_map': {
# 'polldep': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_poll_dep_en_10km.tif",
# 'total':r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_total_en_10km.tif"
# },
# 'target_nodata': -9999,
# 'default_nan': -9999,
# 'target_raster_path': "prop_poll_dep_10km_en.tif",
# 'target_pixel_size': (0.08333300100000000377, -0.08333300100000000377),
# },
# {
# 'expression': 'polldep/total',
# 'symbol_to_path_map': {
# 'polldep': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_poll_dep_fo_10km.tif",
# 'total':r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_total_fo_10km.tif"
# },
# 'target_nodata': -9999,
# 'default_nan': -9999,
# 'target_raster_path': "prop_poll_dep_10km_fo.tif",
# 'target_pixel_size': (0.08333300100000000377, -0.08333300100000000377),
# },
# {
# 'expression': 'polldep/total',
# 'symbol_to_path_map': {
# 'polldep': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_poll_dep_va_10km.tif",
# 'total':r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\monfreda_2008_yield_total_va_10km.tif"
# },
# 'target_nodata': -9999,
# 'default_nan': -9999,
# 'target_raster_path': "prop_poll_dep_10km_va.tif",
# 'target_pixel_size': (0.08333300100000000377, -0.08333300100000000377),
# },
#
# ]
#
# for calculation in calculation_list:
# raster_calculations_core.evaluate_calculation(
# calculation, TASK_GRAPH, WORKSPACE_DIR)
#
# TASK_GRAPH.join()
# TASK_GRAPH.close()
#
# return
# single_expression = {
# 'expression': '(va*(486980) + en*(3319921) + fo*(132654)) / (486980 + 3319921 + 132654)',
# 'symbol_to_path_map': {
# 'va': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\prop_poll_dep_10km_va.tif",
# 'en': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\prop_poll_dep_10km_en.tif",
# 'fo': r"C:\Users\Becky\Documents\raster_calculations\ag_work\pollination\prop_poll_dep_10km_fo.tif",
# },
# 'target_nodata': -9999,
# 'default_nan': -9999,
# 'target_raster_path': "prop_poll_dep_10km_weightedavg.tif",
# 'target_pixel_size': (0.08333333333333332871, -0.08333333333333332871),
# }
#
# raster_calculations_core.evaluate_calculation(
# single_expression, TASK_GRAPH, WORKSPACE_DIR)
#
# TASK_GRAPH.join()
# TASK_GRAPH.close()
#
# return
raster_calculation_list = [
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_va_10s_cur_md5_8e327c260369864d5a38e03279574fb2.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_en_10s_cur_md5_a33bd27cb092807455812b6474b88ea3.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_fo_10s_cur_md5_f0660f3e3123ed1b64a502046e4246bd.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_potential_10s_cur.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_va_10s_ssp1_md5_dd661fc2b46dcaae0291dc8b095162af.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_en_10s_ssp1_md5_e38c0f651fd99cc5823c4d4609f3605a.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_fo_10s_ssp1_md5_259247bc5e53dfa4e299f84fcdd970f0.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_potential_10s_ssp1.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_va_10s_ssp3_md5_9d199ecc7cae7875246fb6c417d36c25.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_en_10s_ssp3_md5_c5a582a699913836740b4d8eebff44cc.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_fo_10s_ssp3_md5_8ebf271cbdcd53561b0457de9dc14ff7.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_potential_10s_ssp3.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_va_10s_ssp5_md5_96374887d44c5f2bd02f1a59bc04081b.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_en_10s_ssp5_md5_e97f7cd3bb6d92944f234596718cb9c9.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_potential_fo_10s_ssp5_md5_15dc8849799d0413ab01a842860515cc.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_potential_10s_ssp5.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_va_10s_cur_md5_c8035666f5a6e5c32fb290df989183e2.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_en_10s_cur_md5_d3e8bc025523d74cd4258f9f954b3cf4.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_fo_10s_cur_md5_857aa9c09357ad6614e33f23710ea380.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_deficit_10s_cur.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_va_10s_ssp1_md5_d9b620961bfe56b7bfb52ee67babe364.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_en_10s_ssp1_md5_2ae004b2e3559cdfc53ed754bfd6b33e.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_fo_10s_ssp1_md5_08c28442f699f35ab903b23480945785.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_deficit_10s_ssp1.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_va_10s_ssp3_md5_0a6744d0b69ec295292a84c8383290d5.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_en_10s_ssp3_md5_10ce2f30db2ac4a97266cfd075e67fa9.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_fo_10s_ssp3_md5_19a2a1423c028e883a477e6b73524da5.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_deficit_10s_ssp3.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_va_10s_ssp5_md5_33e0cd5f3a846d1532a44c56c2d4ade5.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_en_10s_ssp5_md5_b5fb16243689850078961e0228f774f2.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/prod_poll_dep_unrealized_fo_10s_ssp5_md5_155e5e1aab3c226a693973efc41400fc.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_deficit_10s_ssp5.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_va_30s_cur_md5_5dc3b32361e73deefe0c1d3405d1887b.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_en_30s_cur_md5_a0216f9f217a5960179720585720d4fa.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_fo_30s_cur_md5_01077b8ee4bae46e1d07c23728d740fc.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_nut_req_30s_cur.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_en_30s_ssp1_md5_2dec3f715e60666797c3ec170ee86cce.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_en_30s_ssp1_md5_2dec3f715e60666797c3ec170ee86cce.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_fo_30s_ssp1_md5_655aa774ebd352d5bf82336c4c4a72ab.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_nut_req_30s_ssp1.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_va_30s_ssp3_md5_024b2aa9c2e71e72c246c34b71b75bf8.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_en_30s_ssp3_md5_2cd38b2e5b32238f24b635dfdd70cf22.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_fo_30s_ssp3_md5_3f8b935a55836c44f7912f5520699179.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_nut_req_30s_ssp3.tif",
'build_overview': True,
},
{
'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
'symbol_to_path_map': {
'va': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_va_30s_ssp5_md5_4267bfdd9392dff1d8cfd30f504567d9.tif',
'en': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_en_30s_ssp5_md5_279c0ec49113c0036d3dc8c9ef387469.tif',
'fo': 'https://storage.googleapis.com/ipbes-natcap-ecoshard-data-for-publication/nut_req_fo_30s_ssp5_md5_8b1dfa322e4e9202711e8057a34c508e.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_nut_req_30s_ssp5.tif",
'build_overview': True,
},
]
for calculation in raster_calculation_list:
raster_calculations_core.evaluate_calculation(
calculation, TASK_GRAPH, WORKSPACE_DIR)
TASK_GRAPH.join()
derived_raster_calculation_list = [
{
'expression': '(potential-deficit)/potential',
'symbol_to_path_map': {
'potential': 'outputs/pollination_potential_10s_cur.tif',
'deficit': 'outputs/pollination_deficit_10s_cur.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_NC_10s_cur.tif",
'build_overview': True,
},
{
'expression': '(potential-deficit)/potential',
'symbol_to_path_map': {
'potential': 'outputs/pollination_potential_10s_ssp1.tif',
'deficit': 'outputs/pollination_deficit_10s_ssp1.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_NC_10s_ssp1.tif",
'build_overview': True,
},
{
'expression': '(potential-deficit)/potential',
'symbol_to_path_map': {
'potential': 'outputs/pollination_potential_10s_ssp3.tif',
'deficit': 'outputs/pollination_deficit_10s_ssp3.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_NC_10s_ssp3.tif",
'build_overview': True,
},
{
'expression': '(potential-deficit)/potential',
'symbol_to_path_map': {
'potential': 'outputs/pollination_potential_10s_ssp5.tif',
'deficit': 'outputs/pollination_deficit_10s_ssp5.tif',
},
'target_nodata': -1,
'target_raster_path': "outputs/pollination_NC_10s_ssp5.tif",
'build_overview': True,
},
]
for calculation in derived_raster_calculation_list:
raster_calculations_core.evaluate_calculation(
calculation, TASK_GRAPH, WORKSPACE_DIR)
TASK_GRAPH.join()
TASK_GRAPH.close()
if __name__ == '__main__':
TASK_GRAPH = taskgraph.TaskGraph(WORKSPACE_DIR, NCPUS, 5.0)
main()
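# Hypothetical refactoring sketch (not part of the original script): the
# per-scenario dictionaries above differ only in their input URLs and output
# names, so they could be generated from a small table. The parameter names
# below (scenario, va_url, en_url, fo_url, prefix) are illustrative placeholders.
def _make_calculation(scenario, va_url, en_url, fo_url, prefix):
    return {
        'expression': '(va/486980 + en/3319921 + fo/132654) / 3',
        'symbol_to_path_map': {'va': va_url, 'en': en_url, 'fo': fo_url},
        'target_nodata': -1,
        'target_raster_path': "outputs/%s_%s.tif" % (prefix, scenario),
        'build_overview': True,
    }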
| 52.379538
| 177
| 0.645139
| 1,683
| 15,871
| 5.710042
| 0.103387
| 0.044953
| 0.082414
| 0.093652
| 0.784599
| 0.776587
| 0.756191
| 0.750989
| 0.73642
| 0.73642
| 0
| 0.123919
| 0.234768
| 15,871
| 302
| 178
| 52.55298
| 0.667353
| 0.187638
| 0
| 0.333333
| 0
| 0.171429
| 0.654415
| 0.091011
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004762
| false
| 0.004762
| 0.033333
| 0
| 0.038095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9ed438f1151f331b20fc609246ea60f48eeef2ea
| 94,542
|
py
|
Python
|
menpowidgets/menpofitwidgets/base.py
|
apapaion/menpowidgets
|
237a39ddf4e65c57e8165f8a87f25a25f34d4698
|
[
"BSD-3-Clause"
] | null | null | null |
menpowidgets/menpofitwidgets/base.py
|
apapaion/menpowidgets
|
237a39ddf4e65c57e8165f8a87f25a25f34d4698
|
[
"BSD-3-Clause"
] | null | null | null |
menpowidgets/menpofitwidgets/base.py
|
apapaion/menpowidgets
|
237a39ddf4e65c57e8165f8a87f25a25f34d4698
|
[
"BSD-3-Clause"
] | null | null | null |
from menpo.base import MenpoMissingDependencyError
try:
import menpofit
except ImportError as e:
raise MenpoMissingDependencyError(e.name)
# Continue with imports if we have menpofit
from collections import OrderedDict
import numpy as np
import ipywidgets
import IPython.display as ipydisplay
import matplotlib.pyplot as plt
from menpo.base import name_of_callable
from menpo.image import MaskedImage
from menpo.image.base import _convert_patches_list_to_single_array
from menpofit.error import (euclidean_bb_normalised_error,
root_mean_square_bb_normalised_error)
from ..checks import check_n_parameters
from ..options import (SaveMatplotlibFigureOptionsWidget, RendererOptionsWidget,
ImageOptionsWidget, PatchOptionsWidget,
LandmarkOptionsWidget, LinearModelParametersWidget,
PlotMatplotlibOptionsWidget, AnimationOptionsWidget,
TextPrintWidget, Shape2DOptionsWidget)
from ..tools import LogoWidget
from ..style import map_styles_to_hex_colours
from ..utils import (render_patches, render_image,
extract_groups_labels_from_image)
from ..options import IterativeResultOptionsWidget
def visualize_aam(aam, n_shape_parameters=5, n_appearance_parameters=5,
mode='multiple', parameters_bounds=(-3.0, 3.0),
figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale Active
Appearance Model.
Parameters
----------
aam : `menpofit.aam.HolisticAAM`
The multi-scale AAM to be visualized. Note that each level can have a
different number of components.
n_shape_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the shape parameters
sliders. If `int`, then the number of sliders per level is the minimum
between `n_parameters` and the number of active components per level.
If `list` of `int`, then a number of sliders is defined per level.
If ``None``, all the active components per level will have a slider.
n_appearance_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the appearance
parameters sliders. If `int`, then the number of sliders per level is
the minimum between `n_parameters` and the number of active components
per level. If `list` of `int`, then a number of sliders is defined per
level. If ``None``, all the active components per level will have a
slider.
mode : ``{'single', 'multiple'}``, optional
If ``'single'``, then only a single slider is constructed along with a
drop down menu. If ``'multiple'``, then a slider is constructed for each
parameter.
parameters_bounds : (`float`, `float`), optional
The minimum and maximum bounds, in std units, for the sliders.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Get the number of levels
n_levels = aam.n_scales
# Define the styling options
main_style = 'info'
# Get the maximum number of components per level
max_n_shape = [sp.model.n_active_components for sp in aam.shape_models]
max_n_appearance = [ap.n_active_components for ap in aam.appearance_models]
# Check the given number of parameters (the returned n_parameters is a list
# of len n_scales)
n_shape_parameters = check_n_parameters(n_shape_parameters, n_levels,
max_n_shape)
n_appearance_parameters = check_n_parameters(n_appearance_parameters,
n_levels, max_n_appearance)
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Compute weights and instance
shape_weights = shape_model_parameters_wid.selected_values
appearance_weights = appearance_model_parameters_wid.selected_values
instance = aam.instance(scale_index=level, shape_weights=shape_weights,
appearance_weights=appearance_weights)
image_is_masked = isinstance(instance, MaskedImage)
g = landmark_options_wid.selected_values['landmarks']['group']
# Create options dictionary
options = dict()
options.update(landmark_options_wid.selected_values['lines'])
options.update(landmark_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
options.update(renderer_options_wid.selected_values['legend'])
options.update(image_options_wid.selected_values)
options.update(landmark_options_wid.selected_values['landmarks'])
# Correct options based on the type of the shape
if (instance.has_landmarks and
hasattr(instance.landmarks[g], 'labels')):
# If the shape is a LabelledPointUndirectedGraph ...
# ...correct colours
line_colour = []
marker_face_colour = []
marker_edge_colour = []
for lbl in options['with_labels']:
id = instance.landmarks[g].labels.index(lbl)
line_colour.append(options['line_colour'][id])
marker_face_colour.append(options['marker_face_colour'][id])
marker_edge_colour.append(options['marker_edge_colour'][id])
options['line_colour'] = line_colour
options['marker_face_colour'] = marker_face_colour
options['marker_edge_colour'] = marker_edge_colour
else:
# If shape is PointCloud, TriMesh or PointGraph
# ...correct colours
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] *
figure_size[0],
renderer_options_wid.selected_values['zoom_one'] *
figure_size[1])
# show image with selected options
save_figure_wid.renderer = render_image(
image=instance, renderer=save_figure_wid.renderer,
image_is_masked=image_is_masked, figure_size=new_figure_size,
**options)
# Update info
update_info(aam, instance, level, g)
# Define function that updates the info text
def update_info(aam, instance, level, group):
# features info
lvl_app_mod = aam.appearance_models[level]
lvl_shape_mod = aam.shape_models[level].model
aam_mean = lvl_app_mod.mean()
n_channels = aam_mean.n_channels
tmplt_inst = lvl_app_mod.mean()
feat = aam.holistic_features[level]
# Feature string
tmp_feat = 'Feature is {} with {} channel{}'.format(
name_of_callable(feat), n_channels, 's' * (n_channels > 1))
# update info widgets
text_per_line = [
"> Warp using {} transform".format(aam.transform.__name__),
"> Level {}/{}".format(
level + 1, aam.n_scales),
"> {} landmark points".format(
instance.landmarks[group].n_points),
"> {} shape components ({:.2f}% of variance)".format(
lvl_shape_mod.n_components,
lvl_shape_mod.variance_ratio() * 100),
"> {}".format(tmp_feat),
"> Reference frame of length {} ({} x {}C, {} x {}C)".format(
lvl_app_mod.n_features, tmplt_inst.n_true_pixels(), n_channels,
tmplt_inst._str_shape(), n_channels),
"> {} appearance components ({:.2f}% of variance)".format(
lvl_app_mod.n_components, lvl_app_mod.variance_ratio() * 100),
"> Instance: min={:.3f} , max={:.3f}".format(
instance.pixels.min(), instance.pixels.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Plot shape variance function
def plot_shape_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
aam.shape_models[level].model.plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
aam.shape_models[level].model.plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Plot appearance variance function
def plot_appearance_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
aam.appearance_models[level].plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
aam.appearance_models[level].plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Create widgets
shape_model_parameters_wid = LinearModelParametersWidget(
n_shape_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True, plot_variance_function=plot_shape_variance,
animation_step=0.5, interval=0., loop_enabled=True)
appearance_model_parameters_wid = LinearModelParametersWidget(
n_appearance_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True,
plot_variance_function=plot_appearance_variance,
animation_step=0.5, interval=0., loop_enabled=True)
groups_keys, labels_keys = extract_groups_labels_from_image(
aam.appearance_models[0].mean())
image_options_wid = ImageOptionsWidget(
n_channels=aam.appearance_models[0].mean().n_channels,
image_is_masked=isinstance(aam.appearance_models[0].mean(),
MaskedImage),
render_function=render_function)
landmark_options_wid = LandmarkOptionsWidget(
group_keys=groups_keys, labels_keys=labels_keys,
type='2D', render_function=render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib', 'legend'],
labels=None, axes_x_limits=None, axes_y_limits=None,
render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
model_parameters_wid = ipywidgets.HBox(
[ipywidgets.Tab([shape_model_parameters_wid,
appearance_model_parameters_wid])])
model_parameters_wid.children[0].set_title(0, 'Shape')
model_parameters_wid.children[0].set_title(1, 'Appearance')
tmp_children = [model_parameters_wid]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update shape model parameters
shape_model_parameters_wid.set_widget_state(
n_shape_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update appearance model parameters
appearance_model_parameters_wid.set_widget_state(
n_appearance_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update landmarks options
g_keys, l_keys = extract_groups_labels_from_image(
aam.appearance_models[value].mean())
landmark_options_wid.set_widget_state(
group_keys=g_keys, labels_keys=l_keys, allow_callback=False)
# Update channel options
image_options_wid.set_widget_state(
n_channels=aam.appearance_models[value].mean().n_channels,
image_is_masked=isinstance(aam.appearance_models[value].mean(),
MaskedImage),
allow_callback=True)
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
options_box = ipywidgets.Tab([tmp_wid, image_options_wid,
landmark_options_wid, renderer_options_wid,
info_wid, save_figure_wid])
tab_titles = ['Model', 'Image', 'Landmarks', 'Renderer', 'Info', 'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
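# Hypothetical usage sketch (not part of the original module): inside a Jupyter
# notebook, with `aam` a trained menpofit.aam.HolisticAAM with two scales, one
# would simply call
#
#     visualize_aam(aam, n_shape_parameters=[5, 10], mode='single')
#
# where the list [5, 10] requests a different number of parameter sliders per
# pyramid level, as described in the docstring above.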
def visualize_patch_aam(aam, n_shape_parameters=5, n_appearance_parameters=5,
mode='multiple', parameters_bounds=(-3.0, 3.0),
figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale patch-based
Active Appearance Model.
Parameters
----------
aam : `menpofit.aam.PatchAAM`
The multi-scale patch-based AAM to be visualized. Note that each level
can have a different number of components.
n_shape_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the shape parameters
sliders. If `int`, then the number of sliders per level is the minimum
between `n_parameters` and the number of active components per level.
If `list` of `int`, then a number of sliders is defined per level.
If ``None``, all the active components per level will have a slider.
n_appearance_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the appearance
parameters sliders. If `int`, then the number of sliders per level is
the minimum between `n_parameters` and the number of active components
per level. If `list` of `int`, then a number of sliders is defined per
level. If ``None``, all the active components per level will have a
slider.
mode : ``{'single', 'multiple'}``, optional
If ``'single'``, then only a single slider is constructed along with a
drop down menu. If ``'multiple'``, then a slider is constructed for each
parameter.
parameters_bounds : (`float`, `float`), optional
The minimum and maximum bounds, in std units, for the sliders.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Get the number of levels
n_levels = aam.n_scales
# Define the styling options
main_style = 'info'
# Get the maximum number of components per level
max_n_shape = [sp.model.n_active_components for sp in aam.shape_models]
max_n_appearance = [ap.n_active_components for ap in aam.appearance_models]
# Check the given number of parameters (the returned n_parameters is a list
# of len n_scales)
n_shape_parameters = check_n_parameters(n_shape_parameters, n_levels,
max_n_shape)
n_appearance_parameters = check_n_parameters(n_appearance_parameters,
n_levels, max_n_appearance)
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Compute weights and instance
shape_weights = shape_model_parameters_wid.selected_values
appearance_weights = appearance_model_parameters_wid.selected_values
shape_instance, appearance_instance = aam.instance(
scale_index=level, shape_weights=shape_weights,
appearance_weights=appearance_weights)
# Render instance with selected options
options = dict()
options.update(shape_options_wid.selected_values['lines'])
options.update(shape_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
image_options = dict(image_options_wid.selected_values)
del image_options['masked_enabled']
options.update(image_options)
options.update(patch_options_wid.selected_values)
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * figure_size[0],
renderer_options_wid.selected_values['zoom_one'] * figure_size[1])
# show image with selected options
save_figure_wid.renderer = render_patches(
patches=appearance_instance.pixels, patch_centers=shape_instance,
renderer=save_figure_wid.renderer, figure_size=new_figure_size,
**options)
# Update info
update_info(aam, appearance_instance, level)
# Define function that updates the info text
def update_info(aam, appearance_instance, level):
lvl_app_mod = aam.appearance_models[level]
lvl_shape_mod = aam.shape_models[level].model
n_channels = lvl_app_mod.mean().pixels.shape[2]
feat = aam.holistic_features[level]
# Feature string
tmp_feat = 'Feature is {} with {} channel{}'.format(
name_of_callable(feat), n_channels, 's' * (n_channels > 1))
n_feat = (appearance_instance.pixels.shape[0] *
appearance_instance.pixels.shape[2] *
appearance_instance.pixels.shape[3] *
appearance_instance.pixels.shape[4])
# update info widgets
text_per_line = [
"> No image warping performed.",
"> Level {}/{}".format(level + 1, aam.n_scales),
"> {} landmark points".format(appearance_instance.pixels.shape[0]),
"> {} shape components ({:.2f}% of variance)".format(
lvl_shape_mod.n_components,
lvl_shape_mod.variance_ratio() * 100),
"> {}".format(tmp_feat),
"> Reference frame of length {} ({} patches of shape {} x {} "
"and {} channel{}.)".format(
n_feat, appearance_instance.pixels.shape[0],
appearance_instance.pixels.shape[3],
appearance_instance.pixels.shape[4],
appearance_instance.pixels.shape[2],
's' * (appearance_instance.pixels.shape[2] > 1)),
"> {} appearance components ({:.2f}% of variance)".format(
lvl_app_mod.n_components, lvl_app_mod.variance_ratio() * 100),
"> Instance: min={:.3f} , max={:.3f}".format(
appearance_instance.pixels.min(),
appearance_instance.pixels.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Plot shape variance function
def plot_shape_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
aam.shape_models[level].model.plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
aam.shape_models[level].model.plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Plot appearance variance function
def plot_appearance_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
aam.appearance_models[level].plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
aam.appearance_models[level].plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Create widgets
shape_model_parameters_wid = LinearModelParametersWidget(
n_shape_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True, plot_variance_function=plot_shape_variance,
animation_step=0.5, interval=0., loop_enabled=True)
appearance_model_parameters_wid = LinearModelParametersWidget(
n_appearance_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True,
plot_variance_function=plot_appearance_variance, animation_step=0.5,
interval=0., loop_enabled=True)
shape_options_wid = Shape2DOptionsWidget(labels=None, render_function=None)
shape_options_wid.line_options_wid.render_lines_switch.button_wid.value = False
shape_options_wid.add_render_function(render_function)
patch_options_wid = PatchOptionsWidget(
n_patches=aam.appearance_models[0].mean().pixels.shape[0],
n_offsets=aam.appearance_models[0].mean().pixels.shape[1],
render_function=render_function)
image_options_wid = ImageOptionsWidget(
n_channels=aam.appearance_models[0].mean().pixels.shape[2],
image_is_masked=False, render_function=None)
image_options_wid.interpolation_checkbox.button_wid.value = False
image_options_wid.add_render_function(render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib'], labels=None,
axes_x_limits=None, axes_y_limits=None,
render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
model_parameters_wid = ipywidgets.HBox(
[ipywidgets.Tab([shape_model_parameters_wid,
appearance_model_parameters_wid])])
model_parameters_wid.children[0].set_title(0, 'Shape')
model_parameters_wid.children[0].set_title(1, 'Appearance')
tmp_children = [model_parameters_wid]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update shape model parameters
shape_model_parameters_wid.set_widget_state(
n_shape_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update appearance model parameters
appearance_model_parameters_wid.set_widget_state(
n_appearance_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update patch options
patch_options_wid.set_widget_state(
n_patches=aam.appearance_models[value].mean().pixels.shape[0],
n_offsets=aam.appearance_models[value].mean().pixels.shape[1],
allow_callback=False)
# Update channels options
image_options_wid.set_widget_state(
n_channels=aam.appearance_models[value].mean().pixels.shape[2],
image_is_masked=False, allow_callback=True)
# Create pyramid radiobuttons
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
options_box = ipywidgets.Tab([tmp_wid, patch_options_wid,
image_options_wid, shape_options_wid,
renderer_options_wid, info_wid,
save_figure_wid])
tab_titles = ['Model', 'Patches', 'Channels', 'Shape', 'Renderer', 'Info',
'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
def visualize_atm(atm, n_shape_parameters=5, mode='multiple',
parameters_bounds=(-3.0, 3.0), figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale Active
Template Model.
Parameters
----------
atm : `menpofit.atm.ATM`
        The multi-scale ATM to be visualized. Note that each level can have
        a different number of components.
n_shape_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the shape parameters
sliders. If `int`, then the number of sliders per level is the minimum
between `n_parameters` and the number of active components per level.
If `list` of `int`, then a number of sliders is defined per level.
If ``None``, all the active components per level will have a slider.
mode : ``{'single', 'multiple'}``, optional
If ``'single'``, then only a single slider is constructed along with a
drop down menu. If ``'multiple'``, then a slider is constructed for each
parameter.
parameters_bounds : (`float`, `float`), optional
The minimum and maximum bounds, in std units, for the sliders.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Get the number of levels
n_levels = atm.n_scales
# Define the styling options
main_style = 'info'
# Get the maximum number of components per level
max_n_shape = [sp.model.n_active_components for sp in atm.shape_models]
# Check the given number of parameters (the returned n_parameters is a list
# of len n_scales)
n_shape_parameters = check_n_parameters(n_shape_parameters, n_levels,
max_n_shape)
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Compute weights and instance
shape_weights = shape_model_parameters_wid.selected_values
instance = atm.instance(scale_index=level, shape_weights=shape_weights)
image_is_masked = isinstance(instance, MaskedImage)
g = landmark_options_wid.selected_values['landmarks']['group']
# Render instance with selected options
options = dict()
options.update(landmark_options_wid.selected_values['lines'])
options.update(landmark_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
options.update(renderer_options_wid.selected_values['legend'])
options.update(image_options_wid.selected_values)
options.update(landmark_options_wid.selected_values['landmarks'])
# Correct options based on the type of the shape
if (instance.has_landmarks and
hasattr(instance.landmarks[g], 'labels')):
# If the shape is a LabelledPointUndirectedGraph ...
# ...correct colours
line_colour = []
marker_face_colour = []
marker_edge_colour = []
for lbl in options['with_labels']:
                idx = instance.landmarks[g].labels.index(lbl)
                line_colour.append(options['line_colour'][idx])
                marker_face_colour.append(options['marker_face_colour'][idx])
                marker_edge_colour.append(options['marker_edge_colour'][idx])
options['line_colour'] = line_colour
options['marker_face_colour'] = marker_face_colour
options['marker_edge_colour'] = marker_edge_colour
else:
# If shape is PointCloud, TriMesh or PointGraph
# ...correct colours
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] *
figure_size[0],
renderer_options_wid.selected_values['zoom_one'] *
figure_size[1])
# Render shape with selected options
save_figure_wid.renderer = render_image(
image=instance, renderer=save_figure_wid.renderer,
image_is_masked=image_is_masked, figure_size=new_figure_size,
**options)
# Update info
update_info(atm, instance, level, g)
# Define function that updates the info text
def update_info(atm, instance, level, group):
lvl_shape_mod = atm.shape_models[level].model
tmplt_inst = atm.warped_templates[level]
n_channels = tmplt_inst.n_channels
feat = atm.holistic_features[level]
# Feature string
tmp_feat = 'Feature is {} with {} channel{}'.format(
name_of_callable(feat), n_channels, 's' * (n_channels > 1))
# update info widgets
text_per_line = [
"> Warp using {} transform".format(atm.transform.__name__),
"> Level {}/{}".format(
level + 1, atm.n_scales),
"> {} landmark points".format(
instance.landmarks[group].n_points),
"> {} shape components ({:.2f}% of variance)".format(
lvl_shape_mod.n_components,
lvl_shape_mod.variance_ratio() * 100),
"> {}".format(tmp_feat),
"> Reference frame of length {} ({} x {}C, {} x {}C)".format(
tmplt_inst.n_true_pixels() * n_channels,
tmplt_inst.n_true_pixels(), n_channels, tmplt_inst._str_shape(),
n_channels),
"> Instance: min={:.3f} , max={:.3f}".format(
instance.pixels.min(), instance.pixels.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Plot shape variance function
def plot_shape_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
atm.shape_models[level].model.plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
atm.shape_models[level].model.plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Create widgets
shape_model_parameters_wid = LinearModelParametersWidget(
n_shape_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True, plot_variance_function=plot_shape_variance,
animation_step=0.5, interval=0., loop_enabled=True)
groups_keys, labels_keys = extract_groups_labels_from_image(
atm.warped_templates[0])
image_options_wid = ImageOptionsWidget(
n_channels=atm.warped_templates[0].n_channels,
image_is_masked=isinstance(atm.warped_templates[0], MaskedImage),
render_function=render_function)
landmark_options_wid = LandmarkOptionsWidget(
group_keys=groups_keys, labels_keys=labels_keys,
type='2D', render_function=render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib', 'legend'],
labels=None, axes_x_limits=None, axes_y_limits=None,
render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
tmp_children = [shape_model_parameters_wid]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update shape model parameters
shape_model_parameters_wid.set_widget_state(
n_shape_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update landmarks options
g_keys, l_keys = extract_groups_labels_from_image(
atm.warped_templates[value])
landmark_options_wid.set_widget_state(
group_keys=g_keys, labels_keys=l_keys, allow_callback=False)
# Update channel options
image_options_wid.set_widget_state(
n_channels=atm.warped_templates[value].n_channels,
image_is_masked=isinstance(atm.warped_templates[value],
MaskedImage),
allow_callback=True)
# Create pyramid radiobuttons
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
options_box = ipywidgets.Tab([tmp_wid, image_options_wid,
landmark_options_wid, renderer_options_wid,
info_wid, save_figure_wid])
tab_titles = ['Model', 'Image', 'Landmarks', 'Renderer', 'Info', 'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
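# Usage sketch for ``visualize_atm`` (illustrative, not part of the widget
# itself): it assumes ``atm`` is a multi-scale ATM already trained with
# menpofit; only the keyword arguments documented above are used.
#
#     visualize_atm(atm, n_shape_parameters=10, mode='single',
#                   parameters_bounds=(-2.5, 2.5), figure_size=(8, 8))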
def visualize_patch_atm(atm, n_shape_parameters=5, mode='multiple',
parameters_bounds=(-3.0, 3.0), figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale patch-based
Active Template Model.
Parameters
----------
atm : `menpofit.atm.PatchATM`
        The multi-scale patch-based ATM to be visualized. Note that each level
        can have a different number of components.
n_shape_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the shape parameters
sliders. If `int`, then the number of sliders per level is the minimum
between `n_parameters` and the number of active components per level.
If `list` of `int`, then a number of sliders is defined per level.
If ``None``, all the active components per level will have a slider.
mode : ``{'single', 'multiple'}``, optional
If ``'single'``, then only a single slider is constructed along with a
drop down menu. If ``'multiple'``, then a slider is constructed for each
parameter.
parameters_bounds : (`float`, `float`), optional
The minimum and maximum bounds, in std units, for the sliders.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Get the number of levels
n_levels = atm.n_scales
# Define the styling options
main_style = 'info'
# Get the maximum number of components per level
max_n_shape = [sp.n_active_components for sp in atm.shape_models]
# Check the given number of parameters (the returned n_parameters is a list
# of len n_scales)
n_shape_parameters = check_n_parameters(n_shape_parameters, n_levels,
max_n_shape)
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Compute weights and instance
shape_weights = shape_model_parameters_wid.selected_values
shape_instance, template = atm.instance(scale_index=level,
shape_weights=shape_weights)
# Create options dictionary
options = dict()
options.update(shape_options_wid.selected_values['lines'])
options.update(shape_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
image_options = dict(image_options_wid.selected_values)
del image_options['masked_enabled']
options.update(image_options)
options.update(patch_options_wid.selected_values)
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * figure_size[0],
renderer_options_wid.selected_values['zoom_one'] * figure_size[1])
# show image with selected options
save_figure_wid.renderer = render_patches(
patches=template.pixels, patch_centers=shape_instance,
renderer=save_figure_wid.renderer, figure_size=new_figure_size,
**options)
# Update info
update_info(atm, template, level)
# Define function that updates the info text
def update_info(atm, instance, level):
lvl_shape_mod = atm.shape_models[level].model
n_channels = instance.pixels.shape[2]
feat = atm.holistic_features[level]
# Feature string
tmp_feat = 'Feature is {} with {} channel{}'.format(
name_of_callable(feat), n_channels, 's' * (n_channels > 1))
n_feat = (instance.pixels.shape[0] * instance.pixels.shape[2] *
instance.pixels.shape[3] * instance.pixels.shape[4])
# update info widgets
text_per_line = [
"> Warp using {} transform".format(atm.transform.__name__),
"> Level {}/{}".format(
level + 1, atm.n_scales),
"> {} landmark points".format(instance.pixels.shape[0]),
"> {} shape components ({:.2f}% of variance)".format(
lvl_shape_mod.n_components,
lvl_shape_mod.variance_ratio() * 100),
"> {}".format(tmp_feat),
"> Reference frame of length {} ({} patches of shape {} x {} "
"and {} channel{}.)".format(
n_feat, instance.pixels.shape[0], instance.pixels.shape[3],
instance.pixels.shape[4], instance.pixels.shape[2],
's' * (instance.pixels.shape[2] > 1)),
"> Instance: min={:.3f} , max={:.3f}".format(
instance.pixels.min(), instance.pixels.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Plot shape variance function
def plot_shape_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
atm.shape_models[level].model.plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
atm.shape_models[level].model.plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Create widgets
shape_model_parameters_wid = LinearModelParametersWidget(
n_shape_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True, plot_variance_function=plot_shape_variance,
animation_step=0.5, interval=0., loop_enabled=True)
shape_options_wid = Shape2DOptionsWidget(labels=None, render_function=None)
shape_options_wid.line_options_wid.render_lines_switch.button_wid.value = False
shape_options_wid.add_render_function(render_function)
patch_options_wid = PatchOptionsWidget(
n_patches=atm.warped_templates[0].pixels.shape[0],
n_offsets=atm.warped_templates[0].pixels.shape[1],
render_function=render_function)
image_options_wid = ImageOptionsWidget(
n_channels=atm.warped_templates[0].pixels.shape[2],
image_is_masked=False, render_function=None)
image_options_wid.interpolation_checkbox.button_wid.value = False
image_options_wid.add_render_function(render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib'], labels=None,
axes_x_limits=None, axes_y_limits=None, render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
tmp_children = [shape_model_parameters_wid]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update shape model parameters
shape_model_parameters_wid.set_widget_state(
n_shape_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update patch options
patch_options_wid.set_widget_state(
n_patches=atm.warped_templates[value].pixels.shape[0],
n_offsets=atm.warped_templates[value].pixels.shape[1],
allow_callback=False)
# Update channels options
image_options_wid.set_widget_state(
n_channels=atm.warped_templates[value].pixels.shape[2],
image_is_masked=False, allow_callback=True)
# Pyramid radiobuttons
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
options_box = ipywidgets.Tab([tmp_wid, patch_options_wid,
image_options_wid, shape_options_wid,
renderer_options_wid, info_wid,
save_figure_wid])
tab_titles = ['Model', 'Patches', 'Image', 'Shape', 'Renderer', 'Info',
'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
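# Usage sketch for ``visualize_patch_atm`` (illustrative): ``atm`` is assumed
# to be a patch-based ATM already trained with menpofit.
#
#     visualize_patch_atm(atm, n_shape_parameters=6,
#                         parameters_bounds=(-2.0, 2.0))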
def visualize_clm(clm, n_shape_parameters=5, mode='multiple',
parameters_bounds=(-3.0, 3.0), figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale Constrained
Local Model.
Parameters
----------
clm : `menpofit.clm.CLM`
        The multi-scale CLM to be visualized. Note that each level can have
        a different number of components.
n_shape_parameters : `int` or `list` of `int` or ``None``, optional
The number of principal components to be used for the shape parameters
sliders. If `int`, then the number of sliders per level is the minimum
between `n_parameters` and the number of active components per level.
If `list` of `int`, then a number of sliders is defined per level.
If ``None``, all the active components per level will have a slider.
mode : ``{'single', 'multiple'}``, optional
If ``'single'``, then only a single slider is constructed along with a
drop down menu. If ``'multiple'``, then a slider is constructed for each
parameter.
parameters_bounds : (`float`, `float`), optional
The minimum and maximum bounds, in std units, for the sliders.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Get the number of levels
n_levels = clm.n_scales
# Define the styling options
main_style = 'info'
# Get the maximum number of components per level
max_n_shape = [sp.n_active_components for sp in clm.shape_models]
# Check the given number of parameters (the returned n_parameters is a list
# of len n_scales)
n_shape_parameters = check_n_parameters(n_shape_parameters, n_levels,
max_n_shape)
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Compute weights and instance
shape_weights = shape_model_parameters_wid.selected_values
shape_instance = clm.shape_instance(scale_index=level,
shape_weights=shape_weights)
if domain_toggles.value == 'spatial':
patches = _convert_patches_list_to_single_array(
clm.expert_ensembles[level].spatial_filter_images,
clm.expert_ensembles[level].n_experts)
else:
patches = _convert_patches_list_to_single_array(
clm.expert_ensembles[level].frequency_filter_images,
clm.expert_ensembles[level].n_experts)
# Create options dictionary
options = dict()
options.update(shape_options_wid.selected_values['lines'])
options.update(shape_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
image_options = dict(image_options_wid.selected_values)
del image_options['masked_enabled']
options.update(image_options)
options.update(patch_options_wid.selected_values)
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * figure_size[0],
renderer_options_wid.selected_values['zoom_one'] * figure_size[1])
# show image with selected options
save_figure_wid.renderer = render_patches(
patches=patches, patch_centers=shape_instance,
renderer=save_figure_wid.renderer, figure_size=new_figure_size,
**options)
# Update info
update_info(clm, patches, level)
# Define function that updates the info text
def update_info(clm, patches, level):
lvl_shape_mod = clm.shape_models[level].model
n_channels = patches.shape[2]
feat = clm.holistic_features[level]
# Feature string
tmp_feat = 'Feature is {} with {} channel{}'.format(
name_of_callable(feat), n_channels, 's' * (n_channels > 1))
# update info widgets
text_per_line = [
"> {} ensemble of experts".format(
name_of_callable(clm.expert_ensemble_cls[level])),
"> Level {}/{}".format(level + 1, clm.n_scales),
"> {} experts (landmark points)".format(
clm.expert_ensembles[level].n_experts),
"> {} shape components ({:.2f}% of variance)".format(
lvl_shape_mod.n_components,
lvl_shape_mod.variance_ratio() * 100),
"> {}".format(tmp_feat),
"> Patch shape: {} x {}".format(
clm.expert_ensembles[level].search_shape[0],
clm.expert_ensembles[level].search_shape[1]),
"> Instance: min={:.3f} , max={:.3f}".format(
patches.min(), patches.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Plot shape variance function
def plot_shape_variance(name):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
plt.subplot(121)
save_figure_wid.renderer = \
clm.shape_models[level].model.plot_eigenvalues_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False)
plt.subplot(122)
save_figure_wid.renderer = \
clm.shape_models[level].model.plot_eigenvalues_cumulative_ratio(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Create widgets
shape_model_parameters_wid = LinearModelParametersWidget(
n_shape_parameters[0], render_function, params_str='Parameter ',
mode=mode, params_bounds=parameters_bounds, params_step=0.1,
plot_variance_visible=True, plot_variance_function=plot_shape_variance,
animation_step=0.5, interval=0., loop_enabled=True)
shape_options_wid = Shape2DOptionsWidget(labels=None, render_function=None)
shape_options_wid.line_options_wid.render_lines_switch.button_wid.value = False
shape_options_wid.add_render_function(render_function)
patch_options_wid = PatchOptionsWidget(
n_patches=clm.expert_ensembles[0].n_experts, n_offsets=1,
render_function=None)
patch_options_wid.bboxes_line_colour_widget.set_colours(
'white', allow_callback=False)
patch_options_wid.add_render_function(render_function)
patch_options_wid.layout.margin = '10px 0px 0px 0px'
domain_toggles = ipywidgets.ToggleButtons(
description='Domain', options=['spatial', 'frequency'], value='spatial')
domain_toggles.observe(render_function, names='value', type='change')
experts_box = ipywidgets.VBox([domain_toggles, patch_options_wid])
image_options_wid = ImageOptionsWidget(
n_channels=clm.expert_ensembles[0].spatial_filter_images[0].n_channels,
image_is_masked=False, render_function=None)
image_options_wid.interpolation_checkbox.button_wid.value = False
image_options_wid.cmap_select.value = 'afmhot'
image_options_wid.add_render_function(render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib'], labels=None,
axes_x_limits=None, axes_y_limits=None,
render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
tmp_children = [shape_model_parameters_wid]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update shape model parameters
shape_model_parameters_wid.set_widget_state(
n_shape_parameters[value], params_str='Parameter ',
allow_callback=False)
# Update patch options
patch_options_wid.set_widget_state(
n_patches=clm.expert_ensembles[value].n_experts, n_offsets=1,
allow_callback=False)
# Update channels options
image_options_wid.set_widget_state(
n_channels=clm.expert_ensembles[value].spatial_filter_images[
0].n_channels,
image_is_masked=False, allow_callback=True)
# Pyramid radiobuttons
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
options_box = ipywidgets.Tab([tmp_wid, experts_box, image_options_wid,
shape_options_wid, renderer_options_wid,
info_wid, save_figure_wid])
tab_titles = ['Model', 'Experts', 'Image', 'Shape', 'Renderer', 'Info',
'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
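# Usage sketch for ``visualize_clm`` (illustrative): ``clm`` is assumed to be
# a multi-scale CLM already trained with menpofit.
#
#     visualize_clm(clm, n_shape_parameters=10, mode='multiple',
#                   figure_size=(7, 7))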
def visualize_expert_ensemble(expert_ensemble, centers, figure_size=(7, 7)):
r"""
Widget that allows the dynamic visualization of a multi-scale Ensemble of
Experts.
Parameters
----------
expert_ensemble : `list` of `menpofit.clm.expert.ExpertEnsemble` or subclass
The multi-scale ensemble of experts to be visualized.
centers : `list` of `menpo.shape.PointCloud` or subclass
        The centers around which to render the patches. If the `list` has only
        one `menpo.shape.PointCloud`, then it is used for all expert ensemble
        levels. Otherwise, it must have the same length as `expert_ensemble`.
figure_size : (`int`, `int`), optional
The size of the plotted figures.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Make sure that expert_ensemble is a list even with one member
if not isinstance(expert_ensemble, list):
expert_ensemble = [expert_ensemble]
# Get the number of levels (i.e. number of expert ensembles)
n_levels = len(expert_ensemble)
# Make sure that centers is a list even with one pointcloud
if not isinstance(centers, list):
centers = [centers] * n_levels
elif isinstance(centers, list) and len(centers) == 1:
centers *= n_levels
# Define the styling options
main_style = 'info'
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# Get selected level
level = level_wid.value if n_levels > 1 else 0
if domain_toggles.value == 'spatial':
patches = _convert_patches_list_to_single_array(
expert_ensemble[level].spatial_filter_images,
expert_ensemble[level].n_experts)
else:
patches = _convert_patches_list_to_single_array(
expert_ensemble[level].frequency_filter_images,
expert_ensemble[level].n_experts)
# Create options dictionary
options = dict()
options.update(shape_options_wid.selected_values['lines'])
options.update(shape_options_wid.selected_values['markers'])
options.update(
renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['axes'])
image_options = dict(image_options_wid.selected_values)
del image_options['masked_enabled']
options.update(image_options)
options.update(patch_options_wid.selected_values)
options['line_colour'] = options['line_colour'][0]
options['marker_face_colour'] = options['marker_face_colour'][0]
options['marker_edge_colour'] = options['marker_edge_colour'][0]
# Get figure size
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * figure_size[0],
renderer_options_wid.selected_values['zoom_one'] * figure_size[1])
# show image with selected options
save_figure_wid.renderer = render_patches(
patches=patches, patch_centers=centers[level],
renderer=save_figure_wid.renderer, figure_size=new_figure_size,
**options)
# Update info
update_info(expert_ensemble, patches, level)
# Define function that updates the info text
def update_info(expert_ensemble, patches, level):
# update info widgets
text_per_line = [
"> {} ensemble of experts".format(
name_of_callable(expert_ensemble[level])),
"> Level {}/{}".format(level + 1, n_levels),
"> {} experts.".format(expert_ensemble[level].n_experts),
"> Channels: {}".format(patches.shape[2]),
"> Patch shape: {} x {}".format(
expert_ensemble[level].search_shape[0],
expert_ensemble[level].search_shape[1]),
"> Instance: min={:.3f} , max={:.3f}".format(
patches.min(), patches.max())]
info_wid.set_widget_state(text_per_line=text_per_line)
# Create widgets
shape_options_wid = Shape2DOptionsWidget(labels=None, render_function=None)
shape_options_wid.line_options_wid.render_lines_switch.button_wid.value = False
shape_options_wid.add_render_function(render_function)
patch_options_wid = PatchOptionsWidget(
n_patches=expert_ensemble[0].n_experts, n_offsets=1,
render_function=None)
patch_options_wid.bboxes_line_colour_widget.set_colours(
'white', allow_callback=False)
patch_options_wid.add_render_function(render_function)
patch_options_wid.layout.margin = '10px 0px 0px 0px'
domain_toggles = ipywidgets.ToggleButtons(
description='Domain', options=['spatial', 'frequency'], value='spatial')
domain_toggles.observe(render_function, names='value', type='change')
image_options_wid = ImageOptionsWidget(
n_channels=expert_ensemble[0].spatial_filter_images[0].n_channels,
image_is_masked=False, render_function=None)
image_options_wid.interpolation_checkbox.button_wid.value = False
image_options_wid.cmap_select.value = 'afmhot'
image_options_wid.add_render_function(render_function)
renderer_options_wid = RendererOptionsWidget(
options_tabs=['zoom_one', 'axes', 'numbering_matplotlib'], labels=None,
axes_x_limits=None, axes_y_limits=None,
render_function=render_function)
info_wid = TextPrintWidget(text_per_line=[''])
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Group widgets
tmp_children = [domain_toggles]
if n_levels > 1:
# Define function that updates options' widgets state
def update_widgets(change):
value = change['new']
# Update patch options
patch_options_wid.set_widget_state(
n_patches=expert_ensemble[value].n_experts, n_offsets=1,
allow_callback=False)
# Update channels options
image_options_wid.set_widget_state(
n_channels=expert_ensemble[value].spatial_filter_images[
0].n_channels,
image_is_masked=False, allow_callback=True)
# Create pyramid radiobuttons
radio_str = OrderedDict()
for l in range(n_levels):
if l == 0:
radio_str["Level {} (low)".format(l)] = l
elif l == n_levels - 1:
radio_str["Level {} (high)".format(l)] = l
else:
radio_str["Level {}".format(l)] = l
level_wid = ipywidgets.RadioButtons(
options=radio_str, description='Pyramid', value=n_levels-1,
layout=ipywidgets.Layout(width='6cm'))
level_wid.observe(update_widgets, names='value', type='change')
level_wid.observe(render_function, names='value', type='change')
tmp_children.insert(0, level_wid)
tmp_wid = ipywidgets.HBox(tmp_children)
tmp_wid.layout.align_items = 'center'
experts_box = ipywidgets.VBox([tmp_wid, patch_options_wid])
options_box = ipywidgets.Tab([experts_box, image_options_wid,
shape_options_wid, renderer_options_wid,
info_wid, save_figure_wid])
tab_titles = ['Experts', 'Image', 'Shape', 'Renderer', 'Info', 'Export']
for (k, tl) in enumerate(tab_titles):
options_box.set_title(k, tl)
logo_wid = LogoWidget(style=main_style)
logo_wid.layout.margin = '0px 10px 0px 0px'
wid = ipywidgets.HBox([logo_wid, options_box])
# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
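# Usage sketch for ``visualize_expert_ensemble`` (illustrative): the expert
# ensembles of a trained CLM can be browsed directly. Taking the mean shape
# of each scale's shape model as the patch centers is just one sensible
# choice; both ``clm`` and that choice are assumptions, not requirements.
#
#     visualize_expert_ensemble(clm.expert_ensembles,
#                               [sm.model.mean() for sm in clm.shape_models])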
def plot_ced(errors, legend_entries=None, error_range=None,
error_type='me_norm', figure_size=(9, 5), return_widget=False):
r"""
Widget for visualizing the cumulative error curves of the provided errors.
Parameters
----------
errors : `list` of `lists` of `float`
A `list` that stores a `list` of errors to be plotted.
    legend_entries : `list` of `str` or ``None``, optional
        The `list` of names that will appear on the legend for each curve. If
        ``None``, then the names default to ``'Curve {}'.format(i)``.
error_range : `list` of `float` with length 3, optional
Specifies the horizontal axis range, i.e. ::
error_range[0] = min_error
error_range[1] = max_error
error_range[2] = error_step
If ``None``, then ::
error_range = [0., 0.101, 0.005] for error_type = 'me_norm'
error_range = [0., 20., 1.] for error_type = 'me'
error_range = [0., 20., 1.] for error_type = 'rmse'
error_type : ``{'me_norm', 'me', 'rmse'}``, optional
Specifies the type of the provided errors.
figure_size : (`int`, `int`), optional
The initial size of the rendered figure.
return_widget : `bool`, optional
If ``True``, the widget object will be returned so that it can be used
as part of a parent widget. If ``False``, the widget object is not
returned, it is just visualized.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
from menpofit.visualize import plot_cumulative_error_distribution
print('Initializing...')
# Make sure that errors is a list even with one list member
if not isinstance(errors[0], list):
errors = [errors]
# Get number of curves to be plotted
n_curves = len(errors)
# Define the styling options
main_style = 'danger'
# Parse options
if legend_entries is None:
legend_entries = ["Curve {}".format(i) for i in range(n_curves)]
# Get horizontal axis errors
if error_range is None:
if error_type == 'me_norm':
x_axis_limit = 0.05
x_axis_step = 0.005
x_label = 'Normalized Point-to-Point Error'
elif error_type == 'me' or error_type == 'rmse':
x_axis_limit = 5.
x_axis_step = 0.5
x_label = 'Point-to-Point Error'
else:
raise ValueError('error_type must be me_norm or me or rmse')
else:
x_axis_limit = (error_range[1] + error_range[0]) / 2
x_axis_step = error_range[2]
x_label = 'Error'
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# plot with selected options
opts = plot_wid.selected_values.copy()
new_figure_size = (
plot_wid.selected_values['zoom'][0] * figure_size[0],
plot_wid.selected_values['zoom'][1] * figure_size[1])
del opts['zoom']
if opts['axes_x_limits'] is None:
tmp_error_range = None
elif isinstance(opts['axes_x_limits'], float):
tmp_error_range = [0., np.max(errors), x_axis_step]
else:
tmp_error_range = [opts['axes_x_limits'][0],
1.0001 * opts['axes_x_limits'][1],
x_axis_step]
save_figure_wid.renderer = plot_cumulative_error_distribution(
errors, error_range=tmp_error_range,
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size, **opts)
# show plot
save_figure_wid.renderer.force_draw()
# Create widgets
plot_wid = PlotMatplotlibOptionsWidget(
legend_entries=legend_entries, render_function=render_function)
save_figure_wid = SaveMatplotlibFigureOptionsWidget()
# Set values in plot widget
plot_wid.remove_render_function()
plot_wid.x_label.value = x_label
plot_wid.y_label.value = 'Images Proportion'
plot_wid.title.value = 'Cumulative error distribution'
plot_wid.axes_wid.axes_limits_widget.axes_x_limits_toggles.value = 'range'
plot_wid.axes_wid.axes_limits_widget.axes_x_limits_range.set_widget_state(
[0., x_axis_limit], allow_callback=False)
plot_wid.axes_wid.axes_limits_widget.axes_y_limits_toggles.value = 'range'
plot_wid.axes_wid.axes_limits_widget.axes_y_limits_range.set_widget_state(
[0., 1.], allow_callback=False)
plot_wid.axes_wid.axes_ticks_widget.axes_x_ticks_toggles.value = 'auto'
plot_wid.axes_wid.axes_ticks_widget.axes_y_ticks_toggles.value = 'list'
plot_wid.axes_wid.axes_ticks_widget.axes_y_ticks_list.set_widget_state(
list(np.arange(0., 1.1, 0.1)), allow_callback=False)
plot_wid.add_render_function(render_function)
# Group widgets
logo = LogoWidget(style=main_style)
logo.layout.margin = '0px 10px 0px 0px'
tmp_children = list(plot_wid.tab_box.children)
tmp_children.append(save_figure_wid)
plot_wid.tab_box.children = tmp_children
plot_wid.tab_box.set_title(0, 'Labels')
plot_wid.tab_box.set_title(1, 'Style')
plot_wid.tab_box.set_title(2, 'Legend')
plot_wid.tab_box.set_title(3, 'Axes')
plot_wid.tab_box.set_title(4, 'Zoom')
plot_wid.tab_box.set_title(5, 'Grid')
plot_wid.tab_box.set_title(6, 'Export')
# Display final widget
wid = ipywidgets.HBox([logo, plot_wid])
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)
plot_wid.container.border = '0px'
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)
# Trigger initial visualization
render_function({})
# return widget object if asked
if return_widget:
return final_box
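# Usage sketch for ``plot_ced`` (illustrative): the error values below are
# made up; any list of per-image errors, or a list of such lists with one
# entry per method, can be passed.
#
#     errors_a = [0.012, 0.025, 0.031, 0.048, 0.052]
#     errors_b = [0.018, 0.022, 0.045, 0.061, 0.070]
#     plot_ced([errors_a, errors_b], legend_entries=['Method A', 'Method B'],
#              error_type='me_norm')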
def visualize_fitting_results(fitting_results, figure_size=(7, 7),
browser_style='buttons',
custom_info_callback=None):
r"""
Widget that allows browsing through a `list` of fitting results.
Parameters
----------
fitting_results : `list` of `menpofit.result.Result` or `subclass`
        The `list` of fitting results to be displayed. Note that the fitting
        results can have different attributes from one another, e.g. a
        different number of iterations or number of channels.
figure_size : (`int`, `int`), optional
The initial size of the plotted figures.
    browser_style : ``{'buttons', 'slider'}``, optional
        Defines whether the object selector is rendered as plus/minus buttons
        or as a slider.
    custom_info_callback : `function` or ``None``, optional
        If not ``None``, it must be a function that accepts a fitting result
        and returns a `list` of custom messages to be printed per result.
        Each custom message is printed on a separate line.
"""
# Ensure that the code is being run inside a Jupyter kernel!
from menpowidgets.utils import verify_ipython_and_kernel
verify_ipython_and_kernel()
print('Initializing...')
# Make sure that fitting_results is a list even with one fitting_result
if not isinstance(fitting_results, list):
fitting_results = [fitting_results]
# Get the number of fitting_results
n_fitting_results = len(fitting_results)
# Define the styling options
main_style = 'info'
# Define function that plots errors curve
def plot_errors_function(name):
# Clear current figure, but wait until the new data to be displayed are
# generated
ipydisplay.clear_output(wait=True)
# Get selected index
i = image_number_wid.selected_values if n_fitting_results > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
save_figure_wid.renderer = fitting_results[i].plot_errors(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Define function that plots displacements curve
def plot_displacements_function(name):
# Clear current figure, but wait until the new data to be displayed are
# generated
ipydisplay.clear_output(wait=True)
# Get selected index
i = image_number_wid.selected_values if n_fitting_results > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
save_figure_wid.renderer = fitting_results[i].plot_displacements(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size, stat_type='mean')
save_figure_wid.renderer.force_draw()
# Define function that plots errors curve
def plot_costs_function(name):
# Clear current figure, but wait until the new data to be displayed are
# generated
ipydisplay.clear_output(wait=True)
# Get selected index
im = image_number_wid.selected_values if n_fitting_results > 1 else 0
# Render
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * 10,
renderer_options_wid.selected_values['zoom_one'] * 3)
save_figure_wid.renderer = fitting_results[im].plot_costs(
figure_id=save_figure_wid.renderer.figure_id, new_figure=False,
figure_size=new_figure_size)
save_figure_wid.renderer.force_draw()
# Define render function
def render_function(change):
# Clear current figure, but wait until the generation of the new data
# that will be rendered
ipydisplay.clear_output(wait=True)
# get selected object
i = image_number_wid.selected_values if n_fitting_results > 1 else 0
# get selected options
tmp1 = renderer_options_wid.selected_values['markers_matplotlib']
tmp2 = renderer_options_wid.selected_values['lines_matplotlib']
options = fitting_result_wid.selected_values
options.update(renderer_options_wid.selected_values['axes'])
options.update(renderer_options_wid.selected_values['legend'])
options.update(renderer_options_wid.selected_values['numbering_matplotlib'])
options.update(renderer_options_wid.selected_values['image_matplotlib'])
new_figure_size = (
renderer_options_wid.selected_values['zoom_one'] * figure_size[0],
renderer_options_wid.selected_values['zoom_one'] * figure_size[1])
# get selected view function
if (fitting_result_wid.result_iterations_tab.selected_index == 0 or
not fitting_results[i].is_iterative):
# use view()
# final shape colour
final_marker_face_colour = tmp1['marker_face_colour'][0]
final_marker_edge_colour = tmp1['marker_edge_colour'][0]
final_line_colour = tmp2['line_colour'][0]
# initial shape colour
initial_marker_face_colour = 'b'
initial_marker_edge_colour = 'b'
initial_line_colour = 'b'
if fitting_results[i].initial_shape is not None:
initial_marker_face_colour = tmp1['marker_face_colour'][1]
initial_marker_edge_colour = tmp1['marker_edge_colour'][1]
initial_line_colour = tmp2['line_colour'][1]
# gt shape colour
gt_marker_face_colour = 'y'
gt_marker_edge_colour = 'y'
gt_line_colour = 'y'
if fitting_results[i].gt_shape is not None:
if fitting_results[i].initial_shape is not None:
gt_marker_face_colour = tmp1['marker_face_colour'][2]
gt_marker_edge_colour = tmp1['marker_edge_colour'][2]
gt_line_colour = tmp2['line_colour'][2]
else:
gt_marker_face_colour = tmp1['marker_face_colour'][1]
gt_marker_edge_colour = tmp1['marker_edge_colour'][1]
gt_line_colour = tmp2['line_colour'][1]
# render
save_figure_wid.renderer = fitting_results[i].view(
figure_id=save_figure_wid.renderer.figure_id,
new_figure=False, render_markers=tmp1['render_markers'],
final_marker_face_colour=final_marker_face_colour,
final_marker_edge_colour=final_marker_edge_colour,
final_line_colour=final_line_colour,
initial_marker_face_colour=initial_marker_face_colour,
initial_marker_edge_colour=initial_marker_edge_colour,
initial_line_colour=initial_line_colour,
gt_marker_face_colour=gt_marker_face_colour,
gt_marker_edge_colour=gt_marker_edge_colour,
gt_line_colour=gt_line_colour,
marker_style=tmp1['marker_style'],
marker_size=tmp1['marker_size'],
marker_edge_width=tmp1['marker_edge_width'],
render_lines=tmp2['render_lines'],
line_style=tmp2['line_style'],
line_width=tmp2['line_width'],
figure_size=new_figure_size, **options)
else:
# use view_iterations()
if fitting_result_wid.iterations_mode.value == 'animation':
# The mode is 'Animation'
# get colours
marker_face_colour = tmp1['marker_face_colour'][0]
marker_edge_colour = tmp1['marker_edge_colour'][0]
line_colour = tmp2['line_colour'][0]
else:
# The mode is 'Static'
# get colours
marker_face_colour = [
tmp1['marker_face_colour'][i]
for i in fitting_result_wid.selected_values['iters']]
marker_edge_colour = [
tmp1['marker_edge_colour'][i]
for i in fitting_result_wid.selected_values['iters']]
line_colour = [
tmp2['line_colour'][i]
for i in fitting_result_wid.selected_values['iters']]
# render
save_figure_wid.renderer = fitting_results[i].view_iterations(
figure_id=save_figure_wid.renderer.figure_id,
new_figure=False, render_markers=tmp1['render_markers'],
marker_face_colour=marker_face_colour,
marker_style=tmp1['marker_style'],
marker_size=tmp1['marker_size'],
marker_edge_colour=marker_edge_colour,
marker_edge_width=tmp1['marker_edge_width'],
render_lines=tmp2['render_lines'],
line_style=tmp2['line_style'], line_width=tmp2['line_width'],
line_colour=line_colour, figure_size=new_figure_size,
**options)
# Show figure
save_figure_wid.renderer.force_draw()
# update info text widget
update_info({}, custom_info_callback=custom_info_callback)
# Define function that updates info text
def update_info(change, custom_info_callback=None):
# Get selected object
im = image_number_wid.selected_values if n_fitting_results > 1 else 0
fr = fitting_results[im]
# Errors
text_per_line = []
if fr.gt_shape is not None:
# Get error function
error_fun = euclidean_bb_normalised_error
if error_type_toggles.value == 'RMS':
error_fun = root_mean_square_bb_normalised_error
# Set error options visibility
error_box.layout.visibility = 'visible'
# Compute errors
if fr.initial_shape is not None:
text_per_line.append(' > Initial error: {:.4f}'.format(
error_fun(fr.initial_shape, fr.gt_shape,
norm_type=norm_type_toggles.value)))
text_per_line.append(' > Final error: {:.4f}'.format(
error_fun(fr.final_shape, fr.gt_shape,
norm_type=norm_type_toggles.value)))
else:
# Set error options visibility
error_box.layout.visibility = 'hidden'
text_per_line.append(' > No groundtruth shape.')
# Landmarks, scales, iterations
text_per_line.append(' > {} landmark points.'.format(
fr.final_shape.n_points))
if fr.is_iterative:
text_per_line.append(' > {} iterations.'.format(fr.n_iters))
else:
text_per_line.append(' > No iterations.')
if hasattr(fr, 'n_scales'):
text_per_line.append(' > {} scales.'.format(fr.n_scales))
if custom_info_callback is not None:
# iterate over the list of messages returned by the callback
# function and append them in the text_per_line.
for msg in custom_info_callback(fr):
text_per_line.append('> {}'.format(msg))
info_wid.set_widget_state(text_per_line=text_per_line)
# Create renderer widget
labels = ['Final']
default_colours = ['red']
if fitting_results[0].initial_shape is not None:
labels.append('Initial')
default_colours.append('blue')
if fitting_results[0].gt_shape is not None:
labels.append('Groundtruth')
default_colours.append('yellow')
renderer_options_wid = RendererOptionsWidget(
options_tabs=['markers_matplotlib', 'lines_matplotlib', 'zoom_one',
'legend', 'numbering_matplotlib', 'image_matplotlib',
'axes'],
labels=labels, axes_x_limits=None, axes_y_limits=None,
render_function=None)
# Set initial values
renderer_options_wid.options_widgets[3].render_legend_switch.set_widget_state(
True, allow_callback=False)
renderer_options_wid.options_widgets[0].marker_face_colour_widget.set_colours(
default_colours, allow_callback=False)
renderer_options_wid.options_widgets[0].marker_edge_colour_widget.set_colours(
['black'] * len(default_colours), allow_callback=False)
renderer_options_wid.options_widgets[1].line_colour_widget.set_colours(
default_colours, allow_callback=False)
renderer_options_wid.add_render_function(render_function)

# Create info and error options
info_wid = TextPrintWidget(text_per_line=[''])
error_type_toggles = ipywidgets.ToggleButtons(
    options=['Euclidean', 'RMS'], value='Euclidean',
    description='Error type')
norm_type_toggles = ipywidgets.ToggleButtons(
    options=['area', 'perimeter', 'avg_edge_length', 'diagonal'],
    value='avg_edge_length', description='Normalise')
error_box = ipywidgets.VBox([error_type_toggles, norm_type_toggles])
error_box.layout.display = (
    'flex' if fitting_results[0].gt_shape is not None else 'none')
error_type_toggles.observe(update_info, names='value', type='change')
norm_type_toggles.observe(update_info, names='value', type='change')
info_error_box = ipywidgets.HBox([info_wid, error_box])

# Create save figure widget
save_figure_wid = SaveMatplotlibFigureOptionsWidget()


def update_renderer_options(change):
    # Get selected fitting result object
    i = image_number_wid.selected_values if n_fitting_results > 1 else 0
    # Get labels
    if fitting_result_wid.result_iterations_tab.selected_index == 0:
        # We are at the Results tab
        labels = ['Final']
        if fitting_results[i].initial_shape is not None:
            labels.append('Initial')
        if fitting_results[i].gt_shape is not None:
            labels.append('Groundtruth')
    else:
        # We are at the Iterations tab
        if fitting_result_wid.iterations_mode.value == 'animation':
            # The mode is 'Animation'
            labels = None
        else:
            # The mode is 'Static'
            n_digits = len(str(len(fitting_results[i].shapes)))
            labels = []
            for j in list(range(len(fitting_results[i].shapes))):
                if j == 0 and fitting_results[i].initial_shape is not None:
                    labels.append('Initial')
                elif j == len(fitting_results[i].shapes) - 1:
                    labels.append('Final')
                else:
                    labels.append("iteration {:0{}d}".format(j, n_digits))
    renderer_options_wid.set_widget_state(labels=labels,
                                          allow_callback=False)


n_shapes = None
if fitting_results[0].is_iterative:
    n_shapes = len(fitting_results[0].shapes)
has_costs = (fitting_results[0].is_iterative and
             fitting_results[0].costs is not None)
fitting_result_wid = IterativeResultOptionsWidget(
    has_gt_shape=fitting_results[0].gt_shape is not None,
    has_initial_shape=fitting_results[0].initial_shape is not None,
    has_image=fitting_results[0].image is not None,
    n_shapes=n_shapes, has_costs=has_costs, render_function=render_function,
    tab_update_function=update_renderer_options,
    displacements_function=plot_displacements_function,
    errors_function=plot_errors_function,
    costs_function=plot_costs_function)

# Group widgets
if n_fitting_results > 1:
    # Define function that updates options' widgets state
    def update_widgets(change):
        # stop iterations animation
        fitting_result_wid.index_animation.stop_animation()
        # get selected fitting result
        i = image_number_wid.selected_values
        # Update fitting result options
        n_shapes = None
        if fitting_results[i].is_iterative:
            n_shapes = len(fitting_results[i].shapes)
        has_costs = (fitting_results[i].is_iterative and
                     fitting_results[i].costs is not None)
        fitting_result_wid.set_widget_state(
            has_gt_shape=fitting_results[i].gt_shape is not None,
            has_initial_shape=fitting_results[i].initial_shape is not None,
            has_image=fitting_results[i].image is not None,
            n_shapes=n_shapes, has_costs=has_costs, allow_callback=False)
        # Update renderer options
        update_renderer_options({})
        # Render callback
        render_function({})

    # Image selection slider
    index = {'min': 0, 'max': n_fitting_results - 1, 'step': 1, 'index': 0}
    image_number_wid = AnimationOptionsWidget(
        index, render_function=update_widgets, index_style=browser_style,
        interval=0.2, description='Image', loop_enabled=True)

    # Header widget
    logo_wid = LogoWidget(style=main_style)
    logo_wid.layout.margin = '0px 10px 0px 0px'
    header_wid = ipywidgets.HBox([logo_wid, image_number_wid])
    header_wid.layout.align_items = 'center'
    header_wid.layout.margin = '0px 0px 10px 0px'
else:
    # Header widget
    header_wid = LogoWidget(style=main_style)
    header_wid.layout.margin = '0px 10px 0px 0px'

# Widget titles
tab_titles = ['Result', 'Info', 'Renderer', 'Export']
options_box = ipywidgets.Tab([fitting_result_wid, info_error_box,
                              renderer_options_wid, save_figure_wid])
for (k, tl) in enumerate(tab_titles):
    options_box.set_title(k, tl)
if n_fitting_results > 1:
    wid = ipywidgets.VBox([header_wid, options_box])
else:
    wid = ipywidgets.HBox([header_wid, options_box])
if n_fitting_results > 1:
    # If animation is activated and the user selects the save figure tab,
    # then the animation stops.
    def save_fig_tab_fun(change):
        if change['new'] == 3:
            image_number_wid.pause_animation()

    options_box.observe(save_fig_tab_fun, names='selected_index',
                        type='change')

# Set widget's style
wid.box_style = main_style
wid.layout.border = '2px solid ' + map_styles_to_hex_colours(main_style)

# Display final widget
final_box = ipywidgets.Box([wid])
final_box.layout.display = 'flex'
ipydisplay.display(final_box)

# Trigger initial visualization
render_function({})
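The block above wires the option panes into an ipywidgets Tab with per-tab titles and a selected_index observer. Below is a minimal, hedged sketch of that same layout pattern on its own, independent of menpowidgets; every widget name in it is illustrative rather than taken from the library.

# Minimal sketch of the Tab/observe layout pattern used above.
# All pane names below are illustrative; only the ipywidgets calls are real.
import ipywidgets

result_pane = ipywidgets.HTML('Result options')
info_pane = ipywidgets.HTML('Info')
export_pane = ipywidgets.HTML('Export options')

tabs = ipywidgets.Tab([result_pane, info_pane, export_pane])
for k, title in enumerate(['Result', 'Info', 'Export']):
    tabs.set_title(k, title)

def on_tab_selected(change):
    # Same idea as save_fig_tab_fun above: react when a given tab is chosen.
    if change['new'] == 2:
        print('Export tab selected')

tabs.observe(on_tab_selected, names='selected_index', type='change')
ipywidgets.VBox([tabs])  # last expression of a notebook cell displays it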
| 44.891738 | 84 | 0.651901 | 11,539 | 94,542 | 5.049398 | 0.044458 | 0.02952 | 0.029469 | 0.032953 | 0.853102 | 0.825847 | 0.800858 | 0.788398 | 0.768489 | 0.749678 | 0 | 0.008665 | 0.257801 | 94,542 | 2,105 | 85 | 44.913064 | 0.821688 | 0.189789 | 0 | 0.697388 | 0 | 0 | 0.077308 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030479 | false | 0 | 0.019594 | 0 | 0.050798 | 0.005806 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
9ef4dd7ed65b25e6b52d08b7cf5449d79e28a8f5 | 1,463 | py | Python | cnns/foolbox/foolbox_2_3_0/tests/v1/attacks/test_attacks_spatial.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | ["MIT"] | 12 | 2021-07-27T07:18:24.000Z | 2022-03-09T13:52:20.000Z | cnns/foolbox/foolbox_2_3_0/tests/v1/attacks/test_attacks_spatial.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | ["MIT"] | 2 | 2021-08-03T09:21:33.000Z | 2021-12-29T14:25:30.000Z | cnns/foolbox/foolbox_2_3_0/tests/v1/attacks/test_attacks_spatial.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | ["MIT"] | 3 | 2021-11-18T14:46:40.000Z | 2022-01-03T15:47:23.000Z |
import numpy as np

from foolbox.v1.attacks import SpatialAttack as Attack


def test_attack_pytorch(bn_adversarial_pytorch):
    adv = bn_adversarial_pytorch
    attack = Attack()
    attack(adv)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf


def test_attack(bn_adversarial):
    adv = bn_adversarial
    attack = Attack()
    attack(adv)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf


def test_attack_rnd(bn_adversarial):
    adv = bn_adversarial
    attack = Attack()
    attack(adv, random_sampling=True)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf


def test_attack_norot(bn_adversarial):
    adv = bn_adversarial
    attack = Attack()
    attack(adv, do_rotations=False)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf


def test_attack_notrans(bn_adversarial):
    adv = bn_adversarial
    attack = Attack()
    attack(adv, do_translations=False)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf


def test_attack_notrans_norot(bn_adversarial):
    adv = bn_adversarial
    attack = Attack()
    attack(adv, do_translations=False, do_rotations=False)
    # With both rotations and translations disabled the attack cannot move
    # the image, so no adversarial is found.
    assert adv.perturbed is None
    assert adv.distance.value == np.inf


def test_attack_gl(gl_bn_adversarial):
    adv = gl_bn_adversarial
    attack = Attack()
    attack(adv)
    assert adv.perturbed is not None
    assert adv.distance.value < np.inf
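These tests drive foolbox's v1 SpatialAttack through the bn_adversarial fixtures. As a rough, hedged sketch of the same call pattern outside pytest (assuming foolbox 2.x and a user-supplied foolbox model, image and label; run_spatial_attack is a hypothetical helper, not part of foolbox):

# Hedged sketch only: mirrors the call pattern of the tests above.
# `fmodel`, `image` and `label` must be supplied by the caller.
import numpy as np
import foolbox
from foolbox.v1.attacks import SpatialAttack

def run_spatial_attack(fmodel, image, label):
    # Build a v1 Adversarial, which is what the bn_adversarial fixture yields.
    adv = foolbox.v1.Adversarial(
        fmodel, foolbox.criteria.Misclassification(), image, label)
    attack = SpatialAttack()
    # Rotations and translations can be toggled exactly as in the tests.
    attack(adv, do_rotations=True, do_translations=True)
    if adv.perturbed is not None and adv.distance.value < np.inf:
        return adv.perturbed  # spatially transformed adversarial example
    return None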
| 24.383333 | 58 | 0.722488 | 205 | 1,463 | 4.97561 | 0.170732 | 0.178431 | 0.089216 | 0.144118 | 0.835294 | 0.835294 | 0.835294 | 0.791176 | 0.791176 | 0.726471 | 0 | 0.000856 | 0.20164 | 1,463 | 59 | 59 | 24.79661 | 0.872432 | 0 | 0 | 0.613636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.318182 | 1 | 0.159091 | false | 0 | 0.045455 | 0 | 0.204545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
7300afd726a07d6905e90d85cff5557956b5e6e0 | 60,193 | py | Python | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/key/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | ["Apache-2.0"] | null | null | null | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/key/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | ["Apache-2.0"] | null | null | null | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/key/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | ["Apache-2.0"] | 1 | 2021-11-05T22:15:42.000Z | 2021-11-05T22:15:42.000Z |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class key(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls-operational - based on the path /mpls-state/ldp/fec/ldp-fec-prefixes/key. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__type','__prefix_len','__inet_addr_type','__inet_addr_len','__inet_addr','__vc_ldp_peer_id','__data_fec_type','__vc_type','__vc_control_word_config','__vc_use_control_word','__vc_group_id','__vc_id','__vc_agi_offset','__vc_agi_len','__vc_saii_offset','__vc_saii_len','__vc_taii_offset','__vc_taii_len','__vc_gen_id_size','__vc_gen_id',)
_yang_name = 'key'
_rest_name = 'key'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__vc_agi_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-len", rest_name="vc-agi-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_taii_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-offset", rest_name="vc-taii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_ldp_peer_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-ldp-peer-id", rest_name="vc-ldp-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__vc_taii_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-len", rest_name="vc-taii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__data_fec_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="data-fec-type", rest_name="data-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_group_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-group-id", rest_name="vc-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__inet_addr = YANGDynClass(base=unicode, is_leaf=True, yang_name="inet-addr", rest_name="inet-addr", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__inet_addr_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-len", rest_name="inet-addr-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_gen_id_size = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-gen-id-size", rest_name="vc-gen-id-size", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
self.__vc_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-type", rest_name="vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
self.__inet_addr_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-type", rest_name="inet-addr-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-id", rest_name="vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__vc_agi_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-offset", rest_name="vc-agi-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_saii_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-len", rest_name="vc-saii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__prefix_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="prefix-len", rest_name="prefix-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_gen_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-gen-id", rest_name="vc-gen-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="type", rest_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_use_control_word = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-use-control-word", rest_name="vc-use-control-word", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_saii_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-offset", rest_name="vc-saii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__vc_control_word_config = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-control-word-config", rest_name="vc-control-word-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-state', u'ldp', u'fec', u'ldp-fec-prefixes', u'key']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'mpls-state', u'ldp', u'fec', u'ldp-fec-prefixes', u'key']
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/type (uint8)
YANG Description: type
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/type (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="type", rest_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """type must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="type", rest_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__type = t
if hasattr(self, '_set'):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="type", rest_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_prefix_len(self):
"""
Getter method for prefix_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/prefix_len (uint8)
YANG Description: prefix_len
"""
return self.__prefix_len
def _set_prefix_len(self, v, load=False):
"""
Setter method for prefix_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/prefix_len (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix_len is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix_len() directly.
YANG Description: prefix_len
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="prefix-len", rest_name="prefix-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prefix_len must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="prefix-len", rest_name="prefix-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__prefix_len = t
if hasattr(self, '_set'):
self._set()
def _unset_prefix_len(self):
self.__prefix_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="prefix-len", rest_name="prefix-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_inet_addr_type(self):
"""
Getter method for inet_addr_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr_type (uint8)
YANG Description: inet_addr_type
"""
return self.__inet_addr_type
def _set_inet_addr_type(self, v, load=False):
"""
Setter method for inet_addr_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr_type (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_inet_addr_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_inet_addr_type() directly.
YANG Description: inet_addr_type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-type", rest_name="inet-addr-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """inet_addr_type must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-type", rest_name="inet-addr-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__inet_addr_type = t
if hasattr(self, '_set'):
self._set()
def _unset_inet_addr_type(self):
self.__inet_addr_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-type", rest_name="inet-addr-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_inet_addr_len(self):
"""
Getter method for inet_addr_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr_len (uint8)
YANG Description: inet_addr_len
"""
return self.__inet_addr_len
def _set_inet_addr_len(self, v, load=False):
"""
Setter method for inet_addr_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr_len (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_inet_addr_len is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_inet_addr_len() directly.
YANG Description: inet_addr_len
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-len", rest_name="inet-addr-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """inet_addr_len must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-len", rest_name="inet-addr-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__inet_addr_len = t
if hasattr(self, '_set'):
self._set()
def _unset_inet_addr_len(self):
self.__inet_addr_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="inet-addr-len", rest_name="inet-addr-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_inet_addr(self):
"""
Getter method for inet_addr, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr (string)
YANG Description: inet_addr
"""
return self.__inet_addr
def _set_inet_addr(self, v, load=False):
"""
Setter method for inet_addr, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/inet_addr (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_inet_addr is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_inet_addr() directly.
YANG Description: inet_addr
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="inet-addr", rest_name="inet-addr", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """inet_addr must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="inet-addr", rest_name="inet-addr", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
})
self.__inet_addr = t
if hasattr(self, '_set'):
self._set()
def _unset_inet_addr(self):
self.__inet_addr = YANGDynClass(base=unicode, is_leaf=True, yang_name="inet-addr", rest_name="inet-addr", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
def _get_vc_ldp_peer_id(self):
"""
Getter method for vc_ldp_peer_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_ldp_peer_id (string)
YANG Description: vc_ldp_peer_id
"""
return self.__vc_ldp_peer_id
def _set_vc_ldp_peer_id(self, v, load=False):
"""
Setter method for vc_ldp_peer_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_ldp_peer_id (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_ldp_peer_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_ldp_peer_id() directly.
YANG Description: vc_ldp_peer_id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="vc-ldp-peer-id", rest_name="vc-ldp-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_ldp_peer_id must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-ldp-peer-id", rest_name="vc-ldp-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
})
self.__vc_ldp_peer_id = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_ldp_peer_id(self):
self.__vc_ldp_peer_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-ldp-peer-id", rest_name="vc-ldp-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
def _get_data_fec_type(self):
"""
Getter method for data_fec_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/data_fec_type (uint8)
YANG Description: data_fec_type
"""
return self.__data_fec_type
def _set_data_fec_type(self, v, load=False):
"""
Setter method for data_fec_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/data_fec_type (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_data_fec_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_data_fec_type() directly.
YANG Description: data_fec_type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="data-fec-type", rest_name="data-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """data_fec_type must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="data-fec-type", rest_name="data-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__data_fec_type = t
if hasattr(self, '_set'):
self._set()
def _unset_data_fec_type(self):
self.__data_fec_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="data-fec-type", rest_name="data-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_type(self):
"""
Getter method for vc_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_type (uint16)
YANG Description: vc_type
"""
return self.__vc_type
def _set_vc_type(self, v, load=False):
"""
Setter method for vc_type, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_type (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_type() directly.
YANG Description: vc_type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-type", rest_name="vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_type must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-type", rest_name="vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)""",
})
self.__vc_type = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_type(self):
self.__vc_type = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-type", rest_name="vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
def _get_vc_control_word_config(self):
"""
Getter method for vc_control_word_config, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_control_word_config (uint8)
YANG Description: vc_control_word_config
"""
return self.__vc_control_word_config
def _set_vc_control_word_config(self, v, load=False):
"""
Setter method for vc_control_word_config, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_control_word_config (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_control_word_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_control_word_config() directly.
YANG Description: vc_control_word_config
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-control-word-config", rest_name="vc-control-word-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_control_word_config must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-control-word-config", rest_name="vc-control-word-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_control_word_config = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_control_word_config(self):
self.__vc_control_word_config = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-control-word-config", rest_name="vc-control-word-config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_use_control_word(self):
"""
Getter method for vc_use_control_word, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_use_control_word (uint8)
YANG Description: vc_use_control_word
"""
return self.__vc_use_control_word
def _set_vc_use_control_word(self, v, load=False):
"""
Setter method for vc_use_control_word, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_use_control_word (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_use_control_word is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_use_control_word() directly.
YANG Description: vc_use_control_word
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-use-control-word", rest_name="vc-use-control-word", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_use_control_word must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-use-control-word", rest_name="vc-use-control-word", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_use_control_word = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_use_control_word(self):
self.__vc_use_control_word = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-use-control-word", rest_name="vc-use-control-word", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_group_id(self):
"""
Getter method for vc_group_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_group_id (uint32)
YANG Description: vc_group_id
"""
return self.__vc_group_id
def _set_vc_group_id(self, v, load=False):
"""
Setter method for vc_group_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_group_id (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_group_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_group_id() directly.
YANG Description: vc_group_id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-group-id", rest_name="vc-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_group_id must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-group-id", rest_name="vc-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__vc_group_id = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_group_id(self):
self.__vc_group_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-group-id", rest_name="vc-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_vc_id(self):
"""
Getter method for vc_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_id (uint32)
YANG Description: vc_id
"""
return self.__vc_id
def _set_vc_id(self, v, load=False):
"""
Setter method for vc_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_id (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_id() directly.
YANG Description: vc_id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-id", rest_name="vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_id must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-id", rest_name="vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__vc_id = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_id(self):
self.__vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="vc-id", rest_name="vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_vc_agi_offset(self):
"""
Getter method for vc_agi_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_agi_offset (uint8)
YANG Description: vc_agi_offset
"""
return self.__vc_agi_offset
def _set_vc_agi_offset(self, v, load=False):
"""
Setter method for vc_agi_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_agi_offset (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_agi_offset is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_agi_offset() directly.
YANG Description: vc_agi_offset
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-offset", rest_name="vc-agi-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_agi_offset must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-offset", rest_name="vc-agi-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_agi_offset = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_agi_offset(self):
self.__vc_agi_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-offset", rest_name="vc-agi-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_agi_len(self):
"""
Getter method for vc_agi_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_agi_len (uint8)
YANG Description: vc_agi_len
"""
return self.__vc_agi_len
def _set_vc_agi_len(self, v, load=False):
"""
Setter method for vc_agi_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_agi_len (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_agi_len is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_agi_len() directly.
YANG Description: vc_agi_len
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-len", rest_name="vc-agi-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_agi_len must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-len", rest_name="vc-agi-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_agi_len = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_agi_len(self):
self.__vc_agi_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-agi-len", rest_name="vc-agi-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_saii_offset(self):
"""
Getter method for vc_saii_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_saii_offset (uint8)
YANG Description: vc_saii_offset
"""
return self.__vc_saii_offset
def _set_vc_saii_offset(self, v, load=False):
"""
Setter method for vc_saii_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_saii_offset (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_saii_offset is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_saii_offset() directly.
YANG Description: vc_saii_offset
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-offset", rest_name="vc-saii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_saii_offset must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-offset", rest_name="vc-saii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_saii_offset = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_saii_offset(self):
self.__vc_saii_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-offset", rest_name="vc-saii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_saii_len(self):
"""
Getter method for vc_saii_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_saii_len (uint8)
YANG Description: vc_saii_len
"""
return self.__vc_saii_len
def _set_vc_saii_len(self, v, load=False):
"""
Setter method for vc_saii_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_saii_len (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_saii_len is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_saii_len() directly.
YANG Description: vc_saii_len
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-len", rest_name="vc-saii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_saii_len must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-len", rest_name="vc-saii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_saii_len = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_saii_len(self):
self.__vc_saii_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-saii-len", rest_name="vc-saii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_taii_offset(self):
"""
Getter method for vc_taii_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_taii_offset (uint8)
YANG Description: vc_taii_offset
"""
return self.__vc_taii_offset
def _set_vc_taii_offset(self, v, load=False):
"""
Setter method for vc_taii_offset, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_taii_offset (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_taii_offset is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_taii_offset() directly.
YANG Description: vc_taii_offset
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-offset", rest_name="vc-taii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_taii_offset must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-offset", rest_name="vc-taii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_taii_offset = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_taii_offset(self):
self.__vc_taii_offset = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-offset", rest_name="vc-taii-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_taii_len(self):
"""
Getter method for vc_taii_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_taii_len (uint8)
YANG Description: vc_taii_len
"""
return self.__vc_taii_len
def _set_vc_taii_len(self, v, load=False):
"""
Setter method for vc_taii_len, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_taii_len (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_taii_len is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_taii_len() directly.
YANG Description: vc_taii_len
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-len", rest_name="vc-taii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_taii_len must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-len", rest_name="vc-taii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__vc_taii_len = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_taii_len(self):
self.__vc_taii_len = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="vc-taii-len", rest_name="vc-taii-len", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_vc_gen_id_size(self):
"""
Getter method for vc_gen_id_size, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_gen_id_size (uint16)
YANG Description: vc_gen_id_size
"""
return self.__vc_gen_id_size
def _set_vc_gen_id_size(self, v, load=False):
"""
Setter method for vc_gen_id_size, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_gen_id_size (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_gen_id_size is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_gen_id_size() directly.
YANG Description: vc_gen_id_size
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-gen-id-size", rest_name="vc-gen-id-size", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_gen_id_size must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-gen-id-size", rest_name="vc-gen-id-size", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)""",
})
self.__vc_gen_id_size = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_gen_id_size(self):
self.__vc_gen_id_size = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="vc-gen-id-size", rest_name="vc-gen-id-size", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint16', is_config=False)
def _get_vc_gen_id(self):
"""
Getter method for vc_gen_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_gen_id (string)
YANG Description: vc_gen_id
"""
return self.__vc_gen_id
def _set_vc_gen_id(self, v, load=False):
"""
Setter method for vc_gen_id, mapped from YANG variable /mpls_state/ldp/fec/ldp_fec_prefixes/key/vc_gen_id (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_vc_gen_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vc_gen_id() directly.
YANG Description: vc_gen_id
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="vc-gen-id", rest_name="vc-gen-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vc_gen_id must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-gen-id", rest_name="vc-gen-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
})
self.__vc_gen_id = t
if hasattr(self, '_set'):
self._set()
def _unset_vc_gen_id(self):
self.__vc_gen_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="vc-gen-id", rest_name="vc-gen-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
type = __builtin__.property(_get_type)
prefix_len = __builtin__.property(_get_prefix_len)
inet_addr_type = __builtin__.property(_get_inet_addr_type)
inet_addr_len = __builtin__.property(_get_inet_addr_len)
inet_addr = __builtin__.property(_get_inet_addr)
vc_ldp_peer_id = __builtin__.property(_get_vc_ldp_peer_id)
data_fec_type = __builtin__.property(_get_data_fec_type)
vc_type = __builtin__.property(_get_vc_type)
vc_control_word_config = __builtin__.property(_get_vc_control_word_config)
vc_use_control_word = __builtin__.property(_get_vc_use_control_word)
vc_group_id = __builtin__.property(_get_vc_group_id)
vc_id = __builtin__.property(_get_vc_id)
vc_agi_offset = __builtin__.property(_get_vc_agi_offset)
vc_agi_len = __builtin__.property(_get_vc_agi_len)
vc_saii_offset = __builtin__.property(_get_vc_saii_offset)
vc_saii_len = __builtin__.property(_get_vc_saii_len)
vc_taii_offset = __builtin__.property(_get_vc_taii_offset)
vc_taii_len = __builtin__.property(_get_vc_taii_len)
vc_gen_id_size = __builtin__.property(_get_vc_gen_id_size)
vc_gen_id = __builtin__.property(_get_vc_gen_id)
_pyangbind_elements = {'type': type, 'prefix_len': prefix_len, 'inet_addr_type': inet_addr_type, 'inet_addr_len': inet_addr_len, 'inet_addr': inet_addr, 'vc_ldp_peer_id': vc_ldp_peer_id, 'data_fec_type': data_fec_type, 'vc_type': vc_type, 'vc_control_word_config': vc_control_word_config, 'vc_use_control_word': vc_use_control_word, 'vc_group_id': vc_group_id, 'vc_id': vc_id, 'vc_agi_offset': vc_agi_offset, 'vc_agi_len': vc_agi_len, 'vc_saii_offset': vc_saii_offset, 'vc_saii_len': vc_saii_len, 'vc_taii_offset': vc_taii_offset, 'vc_taii_len': vc_taii_len, 'vc_gen_id_size': vc_gen_id_size, 'vc_gen_id': vc_gen_id, }
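A minimal sketch of the backend-population pattern described in the setter docstrings above: operational (config false) leaves are filled by calling the private _set_* methods directly, and reads go through the exposed properties. Here `key_obj` is a stand-in for an instance of the generated key class and is an assumption of this example, not part of the generated module.
# key_obj is assumed to be an instance of the pyangbind-generated key class above.
key_obj._set_vc_taii_len(8)                  # uint8 leaf, restricted to 0..255
key_obj._set_vc_gen_id_size(64)              # uint16 leaf, restricted to 0..65535
key_obj._set_vc_gen_id("generalized-id")     # string leaf
print(key_obj.vc_gen_id)                     # reads use the read-only properties defined above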
| 69.346774
| 620
| 0.743508
| 8,750
| 60,193
| 4.806629
| 0.021714
| 0.041371
| 0.054591
| 0.053926
| 0.94013
| 0.899853
| 0.881212
| 0.87541
| 0.86345
| 0.855152
| 0
| 0.011402
| 0.122838
| 60,193
| 867
| 621
| 69.426759
| 0.785166
| 0.18341
| 0
| 0.517094
| 0
| 0.042735
| 0.360401
| 0.205715
| 0
| 0
| 0
| 0
| 0
| 1
| 0.134615
| false
| 0
| 0.017094
| 0
| 0.260684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7303952ac258316c302460f0c117117f3ba2fccf
| 246
|
py
|
Python
|
ETCetera/Systems/Automata/__init__.py
|
ggleizer/ETCetera
|
8fa9f3c82fd1944507a0c02d52a236244821f3ca
|
[
"MIT"
] | null | null | null |
ETCetera/Systems/Automata/__init__.py
|
ggleizer/ETCetera
|
8fa9f3c82fd1944507a0c02d52a236244821f3ca
|
[
"MIT"
] | null | null | null |
ETCetera/Systems/Automata/__init__.py
|
ggleizer/ETCetera
|
8fa9f3c82fd1944507a0c02d52a236244821f3ca
|
[
"MIT"
] | 1
|
2022-03-11T11:15:20.000Z
|
2022-03-11T11:15:20.000Z
|
from ETCetera.Systems.Automata.automaton import Automaton
# from network_timed_automata import NTA
from ETCetera.Systems.Automata.timed_automaton import TimedAutomaton
from ETCetera.Systems.Automata.timed_game_automaton import TimedGameAutomaton
| 49.2
| 77
| 0.890244
| 30
| 246
| 7.133333
| 0.4
| 0.168224
| 0.266355
| 0.378505
| 0.299065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069106
| 246
| 4
| 78
| 61.5
| 0.934498
| 0.154472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
731e114cbb597bea1377ac925526bc56286334e9
| 86
|
py
|
Python
|
CozmOSU/Actions/__init__.py
|
OSU-cozmo/OSU-Cozmo-Library
|
ea8a06877cbdc88b6f730df92c2d40d4f53470cc
|
[
"MIT"
] | null | null | null |
CozmOSU/Actions/__init__.py
|
OSU-cozmo/OSU-Cozmo-Library
|
ea8a06877cbdc88b6f730df92c2d40d4f53470cc
|
[
"MIT"
] | 1
|
2018-09-21T06:05:49.000Z
|
2018-09-21T06:05:49.000Z
|
CozmOSU/Actions/__init__.py
|
OSU-cozmo/OSU-Cozmo-Library
|
ea8a06877cbdc88b6f730df92c2d40d4f53470cc
|
[
"MIT"
] | null | null | null |
from .Speech import *
from .Lighting import *
from .Head import *
from .Lift import *
| 17.2
| 23
| 0.72093
| 12
| 86
| 5.166667
| 0.5
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 86
| 4
| 24
| 21.5
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b40302d299ea3da6a334e2730590060faa85fe49
| 25,628
|
py
|
Python
|
pysnmp-with-texts/NBS-CMMCENUM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/NBS-CMMCENUM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/NBS-CMMCENUM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module NBS-CMMCENUM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NBS-CMMCENUM-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:17:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
nbs, = mibBuilder.importSymbols("NBS-MIB", "nbs")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, IpAddress, NotificationType, Unsigned32, MibIdentifier, Counter64, Counter32, Gauge32, ModuleIdentity, iso, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "IpAddress", "NotificationType", "Unsigned32", "MibIdentifier", "Counter64", "Counter32", "Gauge32", "ModuleIdentity", "iso", "ObjectIdentity")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
nbsCmmcEnumMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 629, 225))
if mibBuilder.loadTexts: nbsCmmcEnumMib.setLastUpdated('201503120000Z')
if mibBuilder.loadTexts: nbsCmmcEnumMib.setOrganization('NBS')
if mibBuilder.loadTexts: nbsCmmcEnumMib.setContactInfo('For technical support, please contact your service channel')
if mibBuilder.loadTexts: nbsCmmcEnumMib.setDescription('This MIB module defines some frequently updated lists for NBS-CMMC-MIB.')
class NbsCmmcEnumChassisType(TextualConvention, Integer32):
description = 'The type of Chassis.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39))
namedValues = NamedValues(("other", 1), ("bu16", 2), ("bu4", 3), ("bu1", 4), ("bu5", 5), ("bu3", 6), ("bu2", 7), ("fCpe", 8), ("bmc", 9), ("virtual16", 10), ("bu21", 11), ("bu42", 12), ("virtual1", 13), ("virtual2", 14), ("virtual3", 15), ("virtual4", 16), ("bu22", 17), ("bu82", 18), ("bu3v", 19), ("virtual3v", 20), ("bu12", 21), ("occ48", 22), ("occ96", 23), ("occ128", 24), ("occ320", 25), ("od48", 26), ("virtod48", 27), ("od12", 28), ("virtod12", 29), ("od16", 30), ("virtod16", 31), ("od32", 32), ("virtod32", 33), ("od16lc", 34), ("virtod16lc", 35), ("od6", 36), ("virtod6", 37), ("od4", 38), ("virtod4", 39))
class NbsCmmcEnumSlotOperationType(TextualConvention, Integer32):
description = 'Mode, or primary function, of card in slot'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45))
namedValues = NamedValues(("other", 1), ("management", 2), ("converter", 3), ("repeater", 4), ("switch", 5), ("splitterCombiner", 6), ("fastRepeater", 7), ("gigabitRepeater", 8), ("monitor", 9), ("opticSwitch", 10), ("remote", 11), ("redundant", 12), ("centralOffice", 13), ("customerPremise", 14), ("multiplexer", 15), ("deprecated16", 16), ("deprecated17", 17), ("deprecated18", 18), ("optAmpBoosterAGC", 19), ("optAmpBoosterAPC", 20), ("optAmpInlineAGC", 21), ("optAmpInlineAPC", 22), ("optAmpPreampAGC", 23), ("optAmpPreampAPC", 24), ("coDualActive", 25), ("coDualInactive", 26), ("physLayerSwitch", 27), ("packetMux", 28), ("optAmpVariableGain", 29), ("optAmpMidstageAGC", 30), ("optAmpMidstageAPC", 31), ("multiCO1g", 32), ("multiCO10g", 33), ("addDropMux", 34), ("multicast", 35), ("optAttenuator", 36), ("repeater40G", 37), ("multiplexer4x10G", 38), ("optAmpPreampAPPC", 39), ("optPassive", 40), ("transponder", 41), ("muxponder", 42), ("addWssDropSplitter", 43), ("dropWssAddCombiner", 44), ("dualAddWssDropSplitter", 45))
class NbsCmmcEnumSlotType(TextualConvention, Integer32):
description = "This data type is used as the syntax of the nbsCmmcSlotType object in the definition of NBS-CMMC-MIB's nbsCmmcSlotTable. This object is used internally by Manager, and is not useful to most end-users."
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254), SingleValueConstraint(255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509), SingleValueConstraint(510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 
708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757))
namedValues = NamedValues(("empty0", 0), ("empty1", 1), ("empty2", 2), ("empty3", 3), ("em316gs1", 4), ("em316gs2", 5), ("em316gs3", 6), ("em316fms1", 7), ("em316fms2", 8), ("em316fms3", 9), ("em316as1", 10), ("em316as2", 11), ("em316as3", 12), ("em316fds1", 13), ("em316fds2", 14), ("em316fds3", 15), ("em316o3s1", 16), ("em316o3s2", 17), ("em316o3s3", 18), ("em316o12s1", 19), ("em316o12s2", 20), ("em316o12s3", 21), ("em316gsfs1", 22), ("em316gsfs2", 23), ("em316gsfs3", 24), ("em316fsfs1", 25), ("em316fsfs2", 26), ("em316fsfsx", 27), ("em316fsfsz", 28), ("em316fmsfs1", 29), ("em316fmsfs2", 30), ("em316fmsfs3", 31), ("em316asfs2", 32), ("em316asfs3", 33), ("em316fdsfs2", 34), ("em316fdsfs3", 35), ("em316o3sfs2", 36), ("em316o3sfs3", 37), ("em316o12sfs2", 38), ("em316o12sfs3", 39), ("em316em", 40), ("em316emx", 41), ("em316es", 42), ("em316esx", 43), ("em315esz", 44), ("em316fm", 45), ("em316fs1", 46), ("em316fs2", 47), ("em316fsx", 48), ("em315fsz", 49), ("em3162swm", 50), ("em3162sws1", 51), ("em3162sws2", 52), ("em3162sws3a", 53), ("em3162sws3b", 54), ("em3164wdm", 55), ("em316nm", 56), ("em3164sw", 57), ("em3164hub", 58), ("em316sc3m", 59), ("em316sc8m", 60), ("em316sc3s", 61), ("em316sc5s", 62), ("em316fr1", 63), ("em316fr2", 64), ("em316fr3", 65), ("em316gr1", 66), ("em316gr2", 67), ("em316gr3", 68), ("em316f21", 69), ("em316f22", 70), ("em316wdm4", 71), ("em316g", 72), ("em316gsf", 73), ("em316fn", 74), ("em316fsfn", 75), ("em316fmsn", 76), ("em316fmsfn", 77), ("em316asn", 78), ("em316asfsn", 79), ("em316fdsn", 80), ("em316fdsfsn", 81), ("em316o3sn", 82), ("em316o3sfsn", 83), ("em316o12sn", 84), ("em316o12sfsn", 85), ("em316emsn", 86), ("em316emsfsn", 87), ("em316ssn", 88), ("em316ssfsn", 89), ("em316tr", 90), ("em316t1", 91), ("em316t1sf", 92), ("nc3162bu", 93), ("em316wdm4o12", 94), ("em316wdm4o3", 95), ("em316grg", 96), ("em316mso12", 97), ("em316mso3", 98), ("em316e1", 99), ("em316e1sf", 100), ("wdmtrnk", 101), ("em316wdm43", 102), ("em316wdm44", 103), ("em104", 104), ("em105", 105), ("em106", 106), ("em316ds31", 107), ("em316ds32", 108), ("em3164sw1", 109), ("em3166sw1", 110), ("em3166sw2", 111), ("em316wfcs", 112), ("em316wfts", 113), ("em316e11", 114), ("em316e12", 115), ("nc316bu31", 116), ("nc316bu32", 117), ("em316od3", 118), ("nc316nw41", 119), ("nc316nw42", 120), ("em316em1", 121), ("em316e2", 122), ("em316fc", 123), ("em316fcsf", 124), ("nc316nw43", 125), ("nc316nw44", 126), ("em316o48", 127), ("em316o48sf", 128), ("ns129", 129), ("ns130", 130), ("ns131", 131), ("em3163sw", 132), ("em3163swsf", 133), ("em316o3c1", 134), ("em316o3csf", 135), ("nc316nw45", 136), ("nc316nw46", 137), ("em316wdm4f", 138), ("em316wdm4fc", 139), ("em316dpg", 140), ("em3162gsws", 141), ("ns142", 142), ("em316wgcs", 143), ("em316wgts", 144), ("em316wfccs", 145), ("em316wfcts", 146), ("em316wecs", 147), ("em316wets", 148), ("em316osw", 149), ("ns150", 150), ("ns151", 151), ("em316fe11l", 152), ("em316ft11l", 153), ("em316wdm81", 154), ("ns155", 155), ("wdm38", 156), ("ns157", 157), ("em316o3f1", 158), ("ns159", 159), ("em316wdm85", 160), ("em316wdmc3", 161), ("ns162", 162), ("em316fmsh", 163), ("ns164", 164), ("ns165", 165), ("ns166", 166), ("em316e31", 167), ("ns168", 168), ("em316fe12r", 169), ("ns170", 170), ("ns171", 171), ("ns172", 172), ("em316gc1", 173), ("em316gcsf", 174), ("ns175", 175), ("ns176", 176), ("em316ds3sh", 177), ("ns178", 178), ("em316nmhb1", 179), ("em316ds3r", 180), ("ns181", 181), ("em316fe11r", 182), ("em316ft11r", 183), ("ns184", 184), ("em316wdmc4", 185), ("em316adsl1", 
186), ("ns187", 187), ("ns188", 188), ("ns189", 189), ("ns190", 190), ("ns191", 191), ("ns192", 192), ("ns193", 193), ("ns194", 194), ("em316gccsf", 195), ("em316gctsf", 196), ("em316osh", 197), ("ns198", 198), ("ns199", 199), ("ns200", 200), ("ns201", 201), ("ns202", 202), ("ns203", 203), ("ns204", 204), ("ns205", 205), ("ns206", 206), ("ns207", 207), ("ns208", 208), ("ns209", 209), ("em316sadm1", 210), ("ns211", 211), ("ns212", 212), ("em316flm1", 213), ("em316flm2", 214), ("ns215", 215), ("ns216", 216), ("ns217", 217), ("ns218", 218), ("wdm24ctr", 219), ("ns220", 220), ("wdm24ctl", 221), ("em316frm1", 222), ("em316frm2", 223), ("wdm44sf", 224), ("em316swrfhp", 225), ("ns226", 226), ("em316swhp", 227), ("ns228", 228), ("em316f2rm1", 229), ("em316f2rm2", 230), ("ns231", 231), ("ns232", 232), ("ns233", 233), ("ns234", 234), ("ns235", 235), ("ns236", 236), ("ns237", 237), ("ns238", 238), ("em316wfrmc", 239), ("em316wfrmt", 240), ("em316t1mux1", 241), ("em316t1mux2", 242), ("em316e1mux4j", 243), ("em316e1x4sfj", 244), ("ns245", 245), ("em316efrm1", 246), ("em316efrm2", 247), ("ns248", 248), ("ns249", 249), ("ns250", 250), ("ns251", 251), ("ns252", 252), ("ns253", 253), ("ns254", 254)) + NamedValues(("ns255", 255), ("ns256", 256), ("ns257", 257), ("em316sc1021", 258), ("ns259", 259), ("ns260", 260), ("ns261", 261), ("em316edsc1", 262), ("em316edsc2", 263), ("em316wdmslot", 264), ("em316wdmc265", 265), ("empty266", 266), ("em316wp1", 267), ("em316wp2", 268), ("em316oa", 269), ("em316e1mux1", 270), ("em316e1mux2", 271), ("em3162tsfp", 272), ("em316dmr48", 273), ("ns3162sfpr", 274), ("ns316xp342r", 275), ("em316ef", 276), ("em316efsf", 277), ("em316padms", 278), ("ns279", 279), ("ns280", 280), ("ns281", 281), ("ns316f16csfp", 282), ("ns316sdi8", 283), ("ns284", 284), ("em316wdmpa4", 285), ("em316wdmpa4t", 286), ("ns287", 287), ("em3162gbicl", 288), ("em3162gbicr", 289), ("em316ge1sfl", 290), ("em316ge1sfr", 291), ("em316fchub", 292), ("em316fcr", 293), ("em316mr48", 294), ("ns295", 295), ("em316fe1xx", 296), ("em316ft1sf", 297), ("em316gbicsfp", 298), ("ns299", 299), ("ns300", 300), ("em316pamulc8n", 301), ("em316pamulc4n", 302), ("em316t1muxrrm", 303), ("em316e1muxrrm", 304), ("ns305", 305), ("em316wo3c", 306), ("ns307", 307), ("em316grmah", 308), ("em316grmahsf", 309), ("em316efrmah", 310), ("em316efrmahsf", 311), ("em316erm", 312), ("em316ermsf", 313), ("em316efan", 314), ("em316efansf", 315), ("ns316", 316), ("nc316Xp343r", 317), ("ns318", 318), ("em316pamulc8", 319), ("em316pamulc4", 320), ("cm316fFtth", 321), ("ns322", 322), ("ns323", 323), ("ns324", 324), ("ns325", 325), ("em316padm41mu", 326), ("ns327", 327), ("em316pamuscm4", 328), ("em316pamuscd4", 329), ("em316pamuscm8", 330), ("em316pamuscd8", 331), ("em316muxmusc16", 332), ("em316dmuxmusc16", 333), ("ns334", 334), ("em316dpadms", 335), ("ns336", 336), ("em316dwmux16", 337), ("em316dwdmx16", 338), ("ns339", 339), ("ns340", 340), ("em316fe1sf", 341), ("em316xt1", 342), ("em316fe1rj", 343), ("em316gt1sfv", 344), ("ns345", 345), ("ns346", 346), ("ns347", 347), ("ns348", 348), ("ns349", 349), ("nc316xp322", 350), ("nc316xp323", 351), ("em316wermc", 352), ("em316wermt", 353), ("ns354", 354), ("ns355", 355), ("ns356", 356), ("ns357", 357), ("em316ee1rmft", 358), ("em316xe1rmft", 359), ("em316lx2", 360), ("em316lxm", 361), ("em316dwmux32", 362), ("em316dwdmx32v", 363), ("em316dwmux32nv", 364), ("em316dwdmx32n", 365), ("ns366", 366), ("ns367", 367), ("em316fe1rmft", 368), ("em316efe1ah", 369), ("em316eft1ah", 370), ("em316efe1rj", 371), 
("ns372", 372), ("ns373", 373), ("ns374", 374), ("em316grmahsh", 375), ("em316ermahsh", 376), ("ns377", 377), ("ns378", 378), ("em316ermah", 379), ("ns380", 380), ("em3162sfpx", 381), ("ns382", 382), ("pmcwdm8sfp", 383), ("ns384", 384), ("ns385", 385), ("mccSfp36", 386), ("mccGRj36", 387), ("em316osc", 388), ("em316gemx2r", 389), ("em316gemx6r", 390), ("mccSfp72", 391), ("mccGRj72", 392), ("em316gcl", 393), ("em316gclsf", 394), ("em316wgclc", 395), ("em316wgclt", 396), ("ns397", 397), ("ns398", 398), ("ns399", 399), ("ns400", 400), ("ns401", 401), ("ns402", 402), ("ns403", 403), ("ns404", 404), ("ns405", 405), ("ns406", 406), ("ns407", 407), ("ns408", 408), ("ns409", 409), ("ns410", 410), ("ns411", 411), ("ns412", 412), ("ns413", 413), ("ns414", 414), ("ns415", 415), ("ns416", 416), ("em316xfpr", 417), ("oemntgrmah", 418), ("oemntermah", 419), ("oemntnm", 420), ("em316wds3c", 421), ("em316wds3t", 422), ("em316we3c", 423), ("em316we3t", 424), ("ns425", 425), ("ns426", 426), ("em316eft1mua4v", 427), ("em316efx1mub4", 428), ("em316efe1muc4v", 429), ("ns430", 430), ("ns431", 431), ("ns432", 432), ("em316t1mux4rm", 433), ("em316e1muxrjrm", 434), ("em316e1mux4rm", 435), ("em316dmr", 436), ("em316mr", 437), ("ns438", 438), ("ns439", 439), ("ns440", 440), ("em316ge1rjsf", 441), ("em316mr48q", 442), ("em316dmr48q", 443), ("em316mrmx2r", 444), ("ns445", 445), ("ns446", 446), ("ns447", 447), ("ns448", 448), ("ns449", 449), ("ns450", 450), ("mcc9xfp", 451), ("ns452", 452), ("em316cdadd2", 453), ("em316cdadd1", 454), ("ns455", 455), ("ns456", 456), ("em316nmlx12", 457), ("em316nmlx21", 458), ("em316nmlx", 459), ("ns460", 460), ("em316sw22", 461), ("em316sw12", 462), ("em316sw04", 463), ("em316sw13", 464), ("ns465", 465), ("ns466", 466), ("ns467", 467), ("ns468", 468), ("ns469", 469), ("ns470", 470), ("em3164swb", 471), ("ns472", 472), ("ns473", 473), ("ns474", 474), ("em316csadsxx", 475), ("em316csadsxxyy", 476), ("em316csaddxx", 477), ("em316csaddxxyy", 478), ("em3163swb", 479), ("em316ds3", 480), ("em316dt3e3", 481), ("ns482", 482), ("em316mux4xn", 483), ("em316dmx4xn", 484), ("em316mux4xbd", 485), ("em316dmx4xbd", 486), ("em316mux8nbd", 487), ("em316dmx8nbd", 488), ("em316mux8bd", 489), ("em316dmx8bd", 490), ("em316dpadxx", 491), ("em316dpadxxyy", 492), ("em316dpad4xx", 493), ("em316dpad8xx", 494), ("em316wt1c", 495), ("ns496", 496), ("em316gt1rm", 497), ("em316g6t1rm1", 498), ("em316g6t1rm2", 499), ("em316dsadsxx", 500), ("em316ddaddxx", 501), ("em316ddaddxxyy", 502), ("em316edfalv", 503), ("em316psc", 504), ("em316sos", 505), ("em316doscb", 506), ("em316padm8", 507), ("em316csads4", 508), ("ns509", 509)) + NamedValues(("ns510", 510), ("ns511", 511), ("ns512", 512), ("em316plc", 513), ("ns514", 514), ("ns515", 515), ("ns516", 516), ("ns517", 517), ("ns518", 518), ("em316dwmx8", 519), ("ns520", 520), ("em316genpasv", 521), ("em316ge1rm", 522), ("ns523", 523), ("ns524", 524), ("em316g6e1rms2", 525), ("ns526", 526), ("ns527", 527), ("ns528", 528), ("ns529", 529), ("mcc18t1e1", 530), ("ns531", 531), ("ns532", 532), ("mcc18dt3e3", 533), ("em316edfar", 534), ("ns535", 535), ("ns536", 536), ("ns537", 537), ("em316ossh", 538), ("em316sc3", 539), ("ns540", 540), ("em316fc400", 541), ("ns542", 542), ("ns543", 543), ("ns544", 544), ("em316eusmv", 545), ("ns546", 546), ("ns547", 547), ("em316dcm100r", 548), ("em316dcm100l", 549), ("ns550", 550), ("em316twoxfpet", 551), ("em316dwmux16be", 552), ("ns553", 553), ("ns554", 554), ("empmc8xfp", 555), ("ns556", 556), ("em316dwmx16bem", 557), ("ns558", 558), 
("em316e1t1xy", 559), ("dwmx32rbm", 560), ("ns561", 561), ("ns562", 562), ("ns563", 563), ("empmc36t1e1", 564), ("ns565", 565), ("em316palc8nl", 566), ("em316palc8nr", 567), ("em316gswxy", 568), ("em316dwd40m5713", 569), ("em316dwd40m5712", 570), ("em316dwd40m5711", 571), ("em316mux535531b", 572), ("ns573", 573), ("em31610gxy", 574), ("ns575", 575), ("ns576", 576), ("ns577", 577), ("ns578", 578), ("ns579", 579), ("ns580", 580), ("ns581", 581), ("ns582", 582), ("ns583", 583), ("ns584", 584), ("em316os2", 585), ("em316osa", 586), ("ns587", 587), ("ns588", 588), ("ns589", 589), ("ns590", 590), ("ns591", 591), ("ns592", 592), ("em316ea", 593), ("ns594", 594), ("em316eusm10gr", 595), ("em316eusm10gl", 596), ("em316dmdxa16b1", 597), ("em316dmdxa16b2", 598), ("em316dmdxa16b3", 599), ("em316dmdxa16b4", 600), ("em316dmdxa16b5", 601), ("em316dmdxa40m01", 602), ("em316dmdxa40m02", 603), ("em316dmdxa40m03", 604), ("em316dmdxa40m04", 605), ("em316dmdxa40m05", 606), ("em316dmdxa40m06", 607), ("em316dmdxa40m07", 608), ("em316dmdxa40m08", 609), ("em316dmdxa40m09", 610), ("em316dmdxa40m10", 611), ("em316dmdxa40m11", 612), ("em316dmdxa16ra", 613), ("em316dmdxa16rb", 614), ("em31620g1", 615), ("em31620g2", 616), ("em31640g3", 617), ("em31640g4", 618), ("em31640g5", 619), ("em316rpon", 620), ("ns621", 621), ("empmc36sas", 622), ("em316osw8", 623), ("ns624", 624), ("ns625", 625), ("em31610g8swxyr", 626), ("em31610g8swxym", 627), ("em31610g8swxyl", 628), ("ns629", 629), ("em316cmux831b", 630), ("ns631", 631), ("em316mdx46ma001", 632), ("em316mdx46ma002", 633), ("em316mdx46ma003", 634), ("em316mdx46ma004", 635), ("em316mdx46ma005", 636), ("em316mdx46ma006", 637), ("em316mdx46ma007", 638), ("em316mdx46ma008", 639), ("em316mdx46ma009", 640), ("em316mdx46ma010", 641), ("em316mdx46ma011", 642), ("em316mdx46ma012", 643), ("em316osw128a", 644), ("em316osw128b", 645), ("em316osw128c", 646), ("em316osw128d", 647), ("em316osw128e", 648), ("em316osw128f", 649), ("em316osw128g", 650), ("em316osw128h", 651), ("em316osw128i", 652), ("em316osw128j", 653), ("em316osw128k", 654), ("em316osw128l", 655), ("em316osw128m", 656), ("ns657", 657), ("em316dcmxx", 658), ("em316osshlc", 659), ("em316eavg2217", 660), ("em316dmr10g3r", 661), ("em316fdt1e1rm", 662), ("em316sw8fxr", 663), ("em316sw8fxlv", 664), ("em316mdx46mx002", 665), ("em316mdx46mb003", 666), ("em316mdx46mb002", 667), ("em316mdx46mc002", 668), ("em316eamlp2017v", 669), ("ns670", 670), ("em316gemx4rr", 671), ("em316gemx4rlv", 672), ("empmcqsfp36", 673), ("ns674", 674), ("ns675", 675), ("em3162qsfp40", 676), ("ns677", 677), ("ns678", 678), ("mcc36ic", 679), ("ns680", 680), ("em316voar", 681), ("em316voalv", 682), ("em316dvmdxa", 683), ("em316dvmdxbv", 684), ("em316cmdxm8al", 685), ("em316cmdxm8ar", 686), ("ns687", 687), ("ns688", 688), ("em316dvmdxav1", 689), ("em316dvmdxav2", 690), ("em316dvmdxav3", 691), ("em316dvmdxav4", 692), ("em316dvmdxav5", 693), ("em316dvmdxav6", 694), ("em316dvmdxav7", 695), ("em316dvmdxav8", 696), ("em316dvmdxav9", 697), ("ns698", 698), ("ns699", 699), ("ns700", 700), ("em316ra12r", 701), ("em316ra12lv", 702), ("ns703", 703), ("em316ra12mv", 704), ("ns705", 705), ("ns706", 706), ("em316dmr10gf", 707), ("ns708", 708), ("ns709", 709), ("ns710", 710), ("ns711", 711), ("ns712", 712), ("ns713", 713), ("ns714", 714), ("ns715", 715), ("ns716", 716), ("ns717", 717), ("ns718", 718), ("ns719", 719), ("oddmr10g3r", 720), ("oddmr10gf", 721), ("od2hwss4dws", 722), ("od2hmxp100g", 723), ("odtxp100gf2c", 724), ("ns725", 725), ("em316raf10", 726), ("ns727", 727), 
("odtxp100g2c", 728), ("ns729", 729), ("od2hwss4dcw", 730), ("ns731", 731), ("ns732", 732), ("odugc", 733), ("ns734", 734), ("ns735", 735), ("odfiller", 736), ("odtxp100g2cw1", 737), ("od2hwss4dww", 738), ("ns739", 739), ("ns740", 740), ("ns741", 741), ("ns742", 742), ("ns743", 743), ("ns744", 744), ("ns745", 745), ("ns746", 746), ("em316twoxfp16g", 747), ("od2hdwss4dws", 748), ("ns749", 749), ("ns750", 750), ("ns751", 751), ("ns752", 752), ("od2hdmx10g", 753), ("ns754", 754), ("ns755", 755), ("ns756", 756), ("odtxp100gf", 757))
class NbsCmmcEnumPortConnector(TextualConvention, Integer32):
description = 'The Port Connector.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40))
namedValues = NamedValues(("unknown", 1), ("removed", 2), ("foDSC", 3), ("foSC", 4), ("cuRj45", 5), ("foLC", 6), ("coaxF", 7), ("coaxBNC", 8), ("coax2BNC", 9), ("cuRj45wLEDs", 10), ("cuRj11", 11), ("cuDb9", 12), ("cuHssdc", 13), ("coaxHeader", 14), ("foFiberJack", 15), ("foMtRj", 16), ("foMu", 17), ("sg", 18), ("foPigtail", 19), ("cuPigtail", 20), ("smb", 21), ("firewireA", 22), ("firewireB", 23), ("cuRj48", 24), ("fo1LC", 25), ("fo2ST", 26), ("sataDevicePlug", 27), ("sataHostPlug", 28), ("miniCoax", 29), ("mpo", 30), ("miniSAS4x", 31), ("reserved", 32), ("cxpCuPassive", 33), ("cxpCuActive", 34), ("cxpFoActive", 35), ("cxpFoConnect", 36), ("fc", 37), ("cuMicroUsbB", 38), ("rj45wUSBRJ45Active", 39), ("rj45wUSBUSBActive", 40))
class NbsCmmcChannelBand(TextualConvention, Integer32):
description = "The ITU grid labels DWDM channels with a letter 'band' and a numeric channel. Within this mib, the band is indicated by this object, and the channel number is shown in the object nbsOsaChannelNumber. Frequencies of at least 180100 GHz but less than 190100 GHz are considered the L spectrum, and frequencies of at least 190100 but less than 200100 GHz are considered the C spectrum. Frequencies evenly divisible by 100 GHz are designated with a 'C' or 'L' prepended to the channel number. Frequencies that are offset by 50 GHz are designated 'H' within the C spectrum, and 'Q' within the L spectrum."
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
namedValues = NamedValues(("notSupported", 0), ("cBand", 1), ("hBand", 2), ("lBand", 3), ("qBand", 4))
mibBuilder.exportSymbols("NBS-CMMCENUM-MIB", NbsCmmcChannelBand=NbsCmmcChannelBand, NbsCmmcEnumChassisType=NbsCmmcEnumChassisType, NbsCmmcEnumPortConnector=NbsCmmcEnumPortConnector, PYSNMP_MODULE_ID=nbsCmmcEnumMib, NbsCmmcEnumSlotType=NbsCmmcEnumSlotType, nbsCmmcEnumMib=nbsCmmcEnumMib, NbsCmmcEnumSlotOperationType=NbsCmmcEnumSlotOperationType)
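The NbsCmmcChannelBand description above is effectively a small classification rule; the sketch below applies it to a frequency given in GHz. It is an illustration only, not part of the MIB module: the function name is invented, and mapping frequencies that are neither on the 100 GHz grid nor offset by 50 GHz to notSupported is an assumption.
def classify_channel_band(freq_ghz):
    # Apply the band rules quoted in the NbsCmmcChannelBand description.
    if 190100 <= freq_ghz < 200100:
        spectrum = 'C'
    elif 180100 <= freq_ghz < 190100:
        spectrum = 'L'
    else:
        return 'notSupported'
    if freq_ghz % 100 == 0:
        return 'cBand' if spectrum == 'C' else 'lBand'
    if freq_ghz % 100 == 50:
        return 'hBand' if spectrum == 'C' else 'qBand'
    return 'notSupported'  # off-grid frequency (assumed mapping)

# e.g. classify_channel_band(193100) -> 'cBand', classify_channel_band(193150) -> 'hBand'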
| 502.509804
| 14,740
| 0.619479
| 3,117
| 25,628
| 5.092717
| 0.584216
| 0.010142
| 0.011654
| 0.010394
| 0.078556
| 0.066083
| 0.066083
| 0.066083
| 0.066083
| 0.066083
| 0
| 0.350791
| 0.122366
| 25,628
| 50
| 14,741
| 512.56
| 0.354971
| 0.01272
| 0
| 0.131579
| 0
| 0.052632
| 0.360021
| 0.002609
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.052632
| 0.184211
| 0
| 0.842105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
b407461fa89fba0f6176d3be633dbbda87955866
| 5,096
|
py
|
Python
|
tests/components/nuheat/test_climate.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 6
|
2020-07-18T16:33:25.000Z
|
2021-09-26T09:52:04.000Z
|
tests/components/nuheat/test_climate.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:14:33.000Z
|
2022-03-31T06:01:46.000Z
|
tests/components/nuheat/test_climate.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 5
|
2020-03-29T00:29:13.000Z
|
2021-09-06T20:58:40.000Z
|
"""The test for the NuHeat thermostat module."""
from homeassistant.components.nuheat.const import DOMAIN
from homeassistant.setup import async_setup_component
from .mocks import (
_get_mock_nuheat,
_get_mock_thermostat_run,
_get_mock_thermostat_schedule_hold_available,
_get_mock_thermostat_schedule_hold_unavailable,
_get_mock_thermostat_schedule_temporary_hold,
_mock_get_config,
)
from tests.async_mock import patch
async def test_climate_thermostat_run(hass):
"""Test a thermostat with the schedule running."""
mock_thermostat = _get_mock_thermostat_run()
mock_nuheat = _get_mock_nuheat(get_thermostat=mock_thermostat)
with patch(
"homeassistant.components.nuheat.nuheat.NuHeat", return_value=mock_nuheat,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
state = hass.states.get("climate.master_bathroom")
assert state.state == "auto"
expected_attributes = {
"current_temperature": 22.2,
"friendly_name": "Master bathroom",
"hvac_action": "heating",
"hvac_modes": ["auto", "heat"],
"max_temp": 69.4,
"min_temp": 5.0,
"preset_mode": "Run Schedule",
"preset_modes": ["Run Schedule", "Temporary Hold", "Permanent Hold"],
"supported_features": 17,
"temperature": 22.2,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
async def test_climate_thermostat_schedule_hold_unavailable(hass):
"""Test a thermostat with the schedule hold that is offline."""
mock_thermostat = _get_mock_thermostat_schedule_hold_unavailable()
mock_nuheat = _get_mock_nuheat(get_thermostat=mock_thermostat)
with patch(
"homeassistant.components.nuheat.nuheat.NuHeat", return_value=mock_nuheat,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
state = hass.states.get("climate.guest_bathroom")
assert state.state == "unavailable"
expected_attributes = {
"friendly_name": "Guest bathroom",
"hvac_modes": ["auto", "heat"],
"max_temp": 180.6,
"min_temp": -6.1,
"preset_modes": ["Run Schedule", "Temporary Hold", "Permanent Hold"],
"supported_features": 17,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
async def test_climate_thermostat_schedule_hold_available(hass):
"""Test a thermostat with the schedule hold that is online."""
mock_thermostat = _get_mock_thermostat_schedule_hold_available()
mock_nuheat = _get_mock_nuheat(get_thermostat=mock_thermostat)
with patch(
"homeassistant.components.nuheat.nuheat.NuHeat", return_value=mock_nuheat,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
state = hass.states.get("climate.available_bathroom")
assert state.state == "auto"
expected_attributes = {
"current_temperature": 38.9,
"friendly_name": "Available bathroom",
"hvac_action": "idle",
"hvac_modes": ["auto", "heat"],
"max_temp": 180.6,
"min_temp": -6.1,
"preset_mode": "Run Schedule",
"preset_modes": ["Run Schedule", "Temporary Hold", "Permanent Hold"],
"supported_features": 17,
"temperature": 26.1,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
async def test_climate_thermostat_schedule_temporary_hold(hass):
"""Test a thermostat with the temporary schedule hold that is online."""
mock_thermostat = _get_mock_thermostat_schedule_temporary_hold()
mock_nuheat = _get_mock_nuheat(get_thermostat=mock_thermostat)
with patch(
"homeassistant.components.nuheat.nuheat.NuHeat", return_value=mock_nuheat,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
state = hass.states.get("climate.temp_bathroom")
assert state.state == "auto"
expected_attributes = {
"current_temperature": 94.4,
"friendly_name": "Temp bathroom",
"hvac_action": "idle",
"hvac_modes": ["auto", "heat"],
"max_temp": 180.6,
"min_temp": -0.6,
"preset_mode": "Run Schedule",
"preset_modes": ["Run Schedule", "Temporary Hold", "Permanent Hold"],
"supported_features": 17,
"temperature": 37.2,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
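The repeated subset assertion above (flagged by the "Only test for a subset of attributes" comments) can be read as a small helper; this sketch merely factors out that check and is not part of the Home Assistant test suite.
def attributes_contain(state_attributes, expected):
    """True when every expected key/value pair is present in the state attributes."""
    return all(item in state_attributes.items() for item in expected.items())

# usage in the tests above: assert attributes_contain(state.attributes, expected_attributes)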
| 38.029851
| 88
| 0.693485
| 630
| 5,096
| 5.31746
| 0.15873
| 0.066866
| 0.034925
| 0.044776
| 0.853134
| 0.835224
| 0.791045
| 0.750448
| 0.750448
| 0.693134
| 0
| 0.012057
| 0.202512
| 5,096
| 133
| 89
| 38.315789
| 0.812254
| 0.084576
| 0
| 0.575758
| 0
| 0
| 0.233825
| 0.061748
| 0
| 0
| 0
| 0
| 0.121212
| 1
| 0
| false
| 0
| 0.040404
| 0
| 0.040404
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b41a454ed2f8f3357932b810003c89259baf7646
| 32
|
py
|
Python
|
distributed/http/__init__.py
|
bnavigator/distributed
|
5ea74cd7ec3cf5b90c5e6d5a31fb7dc3888ea242
|
[
"BSD-3-Clause"
] | 1,358
|
2016-02-09T21:25:27.000Z
|
2022-03-30T08:06:36.000Z
|
distributed/http/__init__.py
|
bnavigator/distributed
|
5ea74cd7ec3cf5b90c5e6d5a31fb7dc3888ea242
|
[
"BSD-3-Clause"
] | 4,789
|
2016-02-10T00:13:43.000Z
|
2022-03-31T23:56:27.000Z
|
distributed/http/__init__.py
|
bnavigator/distributed
|
5ea74cd7ec3cf5b90c5e6d5a31fb7dc3888ea242
|
[
"BSD-3-Clause"
] | 791
|
2016-02-19T04:34:38.000Z
|
2022-03-31T16:26:38.000Z
|
from .utils import get_handlers
| 16
| 31
| 0.84375
| 5
| 32
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b41fc96145b0ed78cd0b618b1576aede991689df
| 45
|
py
|
Python
|
damster/metrics/bamboo/__init__.py
|
cattz/damster
|
70ac2378760197b2e89150c8632f5bf8fe17167d
|
[
"Apache-2.0"
] | null | null | null |
damster/metrics/bamboo/__init__.py
|
cattz/damster
|
70ac2378760197b2e89150c8632f5bf8fe17167d
|
[
"Apache-2.0"
] | 1
|
2018-05-07T10:57:06.000Z
|
2018-05-28T10:04:45.000Z
|
damster/metrics/bamboo/__init__.py
|
cattz/damster
|
70ac2378760197b2e89150c8632f5bf8fe17167d
|
[
"Apache-2.0"
] | null | null | null |
from .agents import BambooBuildAgentsMetrics
| 22.5
| 44
| 0.888889
| 4
| 45
| 10
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.97561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
81eae86a5dbcd5d5bcdd63474551a84cf22d17e1
| 44
|
py
|
Python
|
src/robotic_lacing/lacing_vision_package/__init__.py
|
laurend/robotic_lacing
|
4865e8752947faa49f373f716399dee406897b4a
|
[
"MIT"
] | null | null | null |
src/robotic_lacing/lacing_vision_package/__init__.py
|
laurend/robotic_lacing
|
4865e8752947faa49f373f716399dee406897b4a
|
[
"MIT"
] | 1
|
2020-10-11T14:04:24.000Z
|
2020-10-11T14:04:24.000Z
|
src/robotic_lacing/lacing_vision_package/__init__.py
|
laurend/robotic_lacing
|
4865e8752947faa49f373f716399dee406897b4a
|
[
"MIT"
] | null | null | null |
from .D435i_Vision_Laura import find_corners
| 44
| 44
| 0.909091
| 7
| 44
| 5.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 0.068182
| 44
| 1
| 44
| 44
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c31f399544493623b1337a22fa1ee45c92891d8d
| 257
|
py
|
Python
|
src/cloudsigma/__init__.py
|
cloudsigma/pycloudsigma
|
220a18481c84357b4a4251241a01faae68f44ae1
|
[
"BSD-3-Clause"
] | 13
|
2015-04-24T10:38:04.000Z
|
2022-01-22T12:48:56.000Z
|
src/cloudsigma/__init__.py
|
cloudsigma/pycloudsigma
|
220a18481c84357b4a4251241a01faae68f44ae1
|
[
"BSD-3-Clause"
] | 8
|
2015-02-23T18:16:14.000Z
|
2022-02-17T09:50:24.000Z
|
src/cloudsigma/__init__.py
|
cloudsigma/pycloudsigma
|
220a18481c84357b4a4251241a01faae68f44ae1
|
[
"BSD-3-Clause"
] | 6
|
2015-02-23T07:39:48.000Z
|
2021-08-08T09:34:50.000Z
|
from cloudsigma.version import __version__
from cloudsigma import bulk
from cloudsigma import conf
from cloudsigma import errors
from cloudsigma import generic
from cloudsigma import resource
from cloudsigma import scenarios
from cloudsigma import metadata
| 28.555556
| 42
| 0.871595
| 33
| 257
| 6.666667
| 0.333333
| 0.509091
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124514
| 257
| 8
| 43
| 32.125
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c324dd4989b277b351625ceef748eda6c2eb5d42
| 4,852
|
py
|
Python
|
pybamm/models/submodels/electrode/ohm/li_metal.py
|
emptylkj/PyBaMM
|
9280577deef09507452beafb56d1231fbfaaf110
|
[
"BSD-3-Clause"
] | null | null | null |
pybamm/models/submodels/electrode/ohm/li_metal.py
|
emptylkj/PyBaMM
|
9280577deef09507452beafb56d1231fbfaaf110
|
[
"BSD-3-Clause"
] | null | null | null |
pybamm/models/submodels/electrode/ohm/li_metal.py
|
emptylkj/PyBaMM
|
9280577deef09507452beafb56d1231fbfaaf110
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Submodels for a lithium metal electrode
#
import pybamm
from .base_ohm import BaseModel
class LithiumMetalSurfaceForm(BaseModel):
"""Explicit model for potential drop across a lithium metal electrode.
Parameters
----------
param : parameter class
The parameters to use for this submodel
options : dict, optional
A dictionary of options to be passed to the model.
**Extends:** :class:`pybamm.electrode.ohm.BaseModel`
"""
def __init__(self, param, options=None):
super().__init__(param, "Negative", options=options)
def get_fundamental_variables(self):
ocp_ref = self.param.U_n_ref
pot_scale = self.param.potential_scale
delta_phi = pybamm.Variable(
"Lithium metal interface surface potential difference",
domain="current collector",
)
variables = {
"Lithium metal interface surface potential difference": delta_phi,
"Lithium metal interface surface potential difference [V]": ocp_ref
+ delta_phi * pot_scale,
}
return variables
def get_coupled_variables(self, variables):
param = self.param
i_boundary_cc = variables["Current collector current density"]
T_n = variables["Negative current collector temperature"]
l_n = param.l_n
delta_phi_s = i_boundary_cc * l_n / param.sigma_n(T_n)
delta_phi_s_dim = param.potential_scale * delta_phi_s
phi_s_cn = variables["Negative current collector potential"]
delta_phi = variables["Lithium metal interface surface potential difference"]
# Potentials at the anode/separator interface
phi_s = phi_s_cn - delta_phi_s
phi_e = phi_s - delta_phi
variables.update(
{
"Negative electrode potential drop": delta_phi_s,
"Negative electrode potential drop [V]": delta_phi_s_dim,
"X-averaged negative electrode ohmic losses": delta_phi_s / 2,
"X-averaged negative electrode ohmic losses [V]": delta_phi_s_dim / 2,
"Lithium metal interface electrode potential": phi_s,
"Lithium metal interface electrolyte potential": phi_e,
}
)
return variables
def set_initial_conditions(self, variables):
delta_phi = variables["Lithium metal interface surface potential difference"]
delta_phi_init = self.param.U_n_init
self.initial_conditions = {delta_phi: delta_phi_init}
def set_algebraic(self, variables):
j_pl = variables["Lithium metal plating current density"]
j_sei = variables["SEI interfacial current density"]
sum_j = j_pl + j_sei
i_cc = variables["Current collector current density"]
delta_phi = variables["Lithium metal interface surface potential difference"]
self.algebraic[delta_phi] = i_cc - sum_j
class LithiumMetalExplicit(BaseModel):
"""Explicit model for potential drop across a lithium metal electrode.
Parameters
----------
param : parameter class
The parameters to use for this submodel
options : dict, optional
A dictionary of options to be passed to the model.
**Extends:** :class:`pybamm.electrode.ohm.BaseModel`
"""
def __init__(self, param, options=None):
super().__init__(param, "Negative", options=options)
def get_coupled_variables(self, variables):
param = self.param
ocp_ref = self.param.U_n_ref
pot_scale = self.param.potential_scale
i_boundary_cc = variables["Current collector current density"]
T_n = variables["Negative current collector temperature"]
l_n = param.l_n
delta_phi_s = i_boundary_cc * l_n / param.sigma_n(T_n)
delta_phi_s_dim = param.potential_scale * delta_phi_s
phi_s_cn = variables["Negative current collector potential"]
delta_phi = variables["Lithium metal interface surface potential difference"]
phi_s = phi_s_cn - delta_phi_s
phi_e = phi_s - delta_phi
variables.update(
{
"Negative electrode potential drop": delta_phi_s,
"Negative electrode potential drop [V]": delta_phi_s_dim,
"X-averaged negative electrode ohmic losses": delta_phi_s / 2,
"X-averaged negative electrode ohmic losses [V]": delta_phi_s_dim / 2,
"Lithium metal interface electrode potential": phi_s,
"Lithium metal interface electrolyte potential": phi_e,
"Lithium metal interface surface potential difference": delta_phi,
"Lithium metal interface surface potential difference [V]": ocp_ref
+ delta_phi * pot_scale,
}
)
return variables
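For reference, the potential bookkeeping performed by both get_coupled_variables methods can be written out for plain numbers; the sketch below mirrors the symbol names used above and is an illustration, not PyBaMM code.
def lithium_metal_potentials(i_boundary_cc, l_n, sigma_n, phi_s_cn, delta_phi):
    # Ohmic drop across the lithium metal electrode, then the electrode and
    # electrolyte potentials at the electrode/separator interface.
    delta_phi_s = i_boundary_cc * l_n / sigma_n
    phi_s = phi_s_cn - delta_phi_s
    phi_e = phi_s - delta_phi
    return delta_phi_s, phi_s, phi_e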
| 36.481203
| 86
| 0.651896
| 577
| 4,852
| 5.22877
| 0.168111
| 0.082201
| 0.04773
| 0.083527
| 0.830958
| 0.828306
| 0.799138
| 0.799138
| 0.799138
| 0.720583
| 0
| 0.001133
| 0.272465
| 4,852
| 132
| 87
| 36.757576
| 0.853541
| 0.138912
| 0
| 0.679012
| 0
| 0
| 0.321132
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08642
| false
| 0
| 0.024691
| 0
| 0.17284
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c32d89d4b69cf5814f0c4551f925a6ca9ecd546f
| 33
|
py
|
Python
|
cloudknot/commands/__init__.py
|
36000/cloudknot
|
47816a0f3437a7358d1c1ab0af748680c65ad2a2
|
[
"MIT"
] | 50
|
2017-10-29T03:30:28.000Z
|
2020-03-12T06:51:27.000Z
|
cloudknot/commands/__init__.py
|
36000/cloudknot
|
47816a0f3437a7358d1c1ab0af748680c65ad2a2
|
[
"MIT"
] | 130
|
2017-09-28T00:52:06.000Z
|
2020-04-20T00:07:45.000Z
|
cloudknot/commands/__init__.py
|
arokem/cloudknot
|
4ba413d7f14348d9eb22c1d9f20c821901cce58b
|
[
"MIT"
] | 11
|
2017-09-26T22:00:29.000Z
|
2019-10-12T12:14:26.000Z
|
from .configure import * # noqa
| 16.5
| 32
| 0.69697
| 4
| 33
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 33
| 1
| 33
| 33
| 0.884615
| 0.121212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c34fdf8d457241f6db0bc7df9c4acd58cdd3fe28
| 1,203
|
py
|
Python
|
tests/ext/test_sentry.py
|
hartungstenio/loafer
|
a1bed9d5ba3294094337a3c476946f3802645f53
|
[
"MIT"
] | 111
|
2016-04-11T12:58:07.000Z
|
2021-11-08T09:39:39.000Z
|
tests/ext/test_sentry.py
|
hartungstenio/loafer
|
a1bed9d5ba3294094337a3c476946f3802645f53
|
[
"MIT"
] | 43
|
2016-04-18T00:04:33.000Z
|
2021-11-07T22:52:31.000Z
|
tests/ext/test_sentry.py
|
hartungstenio/loafer
|
a1bed9d5ba3294094337a3c476946f3802645f53
|
[
"MIT"
] | 33
|
2016-05-30T15:13:46.000Z
|
2022-01-29T23:16:18.000Z
|
from unittest import mock
from loafer.ext.sentry import sentry_handler
def test_sentry_handler():
mock_scope = mock.MagicMock()
sdk_mocked = mock.Mock()
sdk_mocked.push_scope.return_value = mock_scope
handler = sentry_handler(sdk_mocked)
exc = ValueError("test")
exc_info = (type(exc), exc, None)
delete_message = handler(exc_info, "test")
assert delete_message is False
assert sdk_mocked.push_scope.called
mock_scope.__enter__.return_value.set_extra.assert_called_once_with(
"message", "test"
)
sdk_mocked.capture_exception.assert_called_once_with(exc_info)
def test_sentry_handler_delete_message():
mock_scope = mock.MagicMock()
sdk_mocked = mock.Mock()
sdk_mocked.push_scope.return_value = mock_scope
handler = sentry_handler(sdk_mocked, delete_message=True)
exc = ValueError("test")
exc_info = (type(exc), exc, None)
delete_message = handler(exc_info, "test")
assert delete_message is True
assert sdk_mocked.push_scope.called
mock_scope.__enter__.return_value.set_extra.assert_called_once_with(
"message", "test"
)
sdk_mocked.capture_exception.assert_called_once_with(exc_info)
| 30.075
| 72
| 0.740648
| 164
| 1,203
| 5.018293
| 0.219512
| 0.109356
| 0.063183
| 0.087485
| 0.831106
| 0.831106
| 0.831106
| 0.831106
| 0.831106
| 0.831106
| 0
| 0
| 0.169576
| 1,203
| 39
| 73
| 30.846154
| 0.823824
| 0
| 0
| 0.666667
| 0
| 0
| 0.031588
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6f13bf2934f4e9f6474a9f0699102d78f3c869ba
| 1,705
|
py
|
Python
|
tasks/example.py
|
diegograssato/python-base
|
47936675311266fe0bdb43faa6ed43c50539c53d
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
tasks/example.py
|
diegograssato/python-base
|
47936675311266fe0bdb43faa6ed43c50539c53d
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
tasks/example.py
|
diegograssato/python-base
|
47936675311266fe0bdb43faa6ed43c50539c53d
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
# example.py
import multitasking
import time
import random
import signal
import argparse
# kill all tasks on ctrl-c
from tasks import multicast
from tasks.multicast import multicast
from tasks.tasks import Tasks
from utils import ssh
import sys
signal.signal(signal.SIGINT, multitasking.killall)
def execute():
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--operation', type=str, choices=('network', 'multicast', 'unicast'), help='operation',
required=True)
parser.add_argument('-s', '--source', type=str, help='username', required=True)
parser.add_argument('-p', '--destination', type=str, help='password', required=True)
args = parser.parse_args()
# call the methods magically
test_command = getattr(sys.modules[__name__], args.operation)
if issubclass(test_command, Tasks):
command = test_command()
command.execute(args)
else:
raise NotImplementedError()
sys.exit(0)
# if __name__ == "__main__":
# parser = argparse.ArgumentParser()
# parser.add_argument('-o', '--operation', type=str, choices=('network', 'multicast', 'unicast'), help='operation',
# required=True)
# parser.add_argument('-s', '--source', type=str, help='username', required=True)
# parser.add_argument('-p', '--destination', type=str, help='password', required=True)
# args = parser.parse_args()
#
# # call the methods magically
# test_command = getattr(sys.modules[__name__], args.operation)
# if issubclass(test_command, Tasks):
# command = test_command()
# command.execute(args)
# else:
# raise NotImplementedError()
# sys.exit(0)
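The "call the methods magically" dispatch above resolves the --operation string to an object in the current module via getattr; the toy below shows the same pattern in isolation. The class names here are invented for the illustration and are not the project's real task classes.
import sys

class Tasks:
    def execute(self, args):
        raise NotImplementedError

class multicast(Tasks):  # example task; the real ones live in the tasks package
    def execute(self, args):
        print("multicast", args)

def dispatch(operation, args):
    target = getattr(sys.modules[__name__], operation)  # resolve name -> object
    if isinstance(target, type) and issubclass(target, Tasks):
        target().execute(args)
    else:
        raise NotImplementedError(operation)

# dispatch("multicast", {"source": "10.0.0.1", "destination": "239.0.0.1"})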
| 32.788462
| 119
| 0.665103
| 195
| 1,705
| 5.661538
| 0.317949
| 0.048913
| 0.092391
| 0.076087
| 0.759058
| 0.759058
| 0.759058
| 0.759058
| 0.759058
| 0.759058
| 0
| 0.001456
| 0.194135
| 1,705
| 51
| 120
| 33.431373
| 0.802038
| 0.441056
| 0
| 0
| 0
| 0
| 0.092275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0.04
| 0.4
| 0
| 0.44
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6f18143dc711ca74e883c7edcae97638e63f722f
| 8,278
|
py
|
Python
|
test/unit/test_action_restore.py
|
ssavrim/curator
|
3ddfbb95ea7a164bb3f8c773b0392481f067da7c
|
[
"Apache-2.0"
] | 2,449
|
2015-03-11T05:04:13.000Z
|
2022-03-30T11:24:31.000Z
|
test/unit/test_action_restore.py
|
ssavrim/curator
|
3ddfbb95ea7a164bb3f8c773b0392481f067da7c
|
[
"Apache-2.0"
] | 960
|
2015-03-10T20:58:11.000Z
|
2022-03-24T15:26:40.000Z
|
test/unit/test_action_restore.py
|
lenn4rd/curator
|
1bfa366325e0651397999999367b65df2c3c5c86
|
[
"Apache-2.0"
] | 702
|
2015-03-11T09:35:39.000Z
|
2022-03-28T06:22:59.000Z
|
from unittest import TestCase
from mock import Mock, patch
import elasticsearch
import curator
# Get test variables and constants from a single source
from . import testvars as testvars
class TestActionRestore(TestCase):
def test_init_raise_bad_snapshot_list(self):
self.assertRaises(TypeError, curator.Restore, 'invalid')
def test_init_raise_unsuccessful_snapshot_list(self):
client = Mock()
client.snapshot.get.return_value = testvars.partial
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
self.assertRaises(curator.CuratorException, curator.Restore, slo)
def test_snapshot_derived_name(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo)
self.assertEqual('snapshot-2015.03.01', ro.name)
def test_provided_name(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, name=testvars.snap_name)
self.assertEqual(testvars.snap_name, ro.name)
def test_partial_snap(self):
client = Mock()
client.snapshot.get.return_value = testvars.partial
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, partial=True)
self.assertEqual(testvars.snap_name, ro.name)
def test_provided_indices(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, indices=testvars.named_indices)
self.assertEqual('snapshot-2015.03.01', ro.name)
def test_extra_settings(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, extra_settings={'foo':'bar'})
self.assertEqual(ro.body['foo'], 'bar')
def test_bad_extra_settings(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, extra_settings='invalid')
self.assertEqual(ro.body,
{
'ignore_unavailable': False,
'partial': False,
'include_aliases': False,
'rename_replacement': '',
'rename_pattern': '',
'indices': ['index-2015.01.01', 'index-2015.02.01'],
'include_global_state': False
}
)
def test_get_expected_output(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(
slo, rename_pattern='(.+)', rename_replacement='new_$1')
self.assertEqual(
ro.expected_output,
['new_index-2015.01.01', 'new_index-2015.02.01']
)
def test_do_dry_run(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo)
self.assertIsNone(ro.do_dry_run())
def test_do_dry_run_with_renames(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(
slo, rename_pattern='(.+)', rename_replacement='new_$1')
self.assertIsNone(ro.do_dry_run())
def test_report_state_all(self):
client = Mock()
client.info.return_value = {'version': {'number': '5.0.0'} }
client.snapshot.get.return_value = testvars.snapshot
client.snapshot.get_repository.return_value = testvars.test_repo
client.indices.get_settings.return_value = testvars.settings_named
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo)
self.assertIsNone(ro.report_state())
def test_report_state_not_all(self):
client = Mock()
client.info.return_value = {'version': {'number': '5.0.0'} }
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
client.indices.get_settings.return_value = testvars.settings_one
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(
slo, rename_pattern='(.+)', rename_replacement='new_$1')
self.assertRaises(curator.exceptions.FailedRestore, ro.report_state)
def test_do_action_success(self):
client = Mock()
client.info.return_value = {'version': {'number': '5.0.0'} }
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
client.snapshot.status.return_value = testvars.nosnap_running
client.snapshot.verify_repository.return_value = testvars.verified_nodes
client.indices.get_settings.return_value = testvars.settings_named
client.indices.recovery.return_value = testvars.recovery_output
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, wait_interval=0.5, max_wait=1)
self.assertIsNone(ro.do_action())
def test_do_action_snap_in_progress(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
client.snapshot.status.return_value = testvars.snap_running
client.snapshot.verify_repository.return_value = testvars.verified_nodes
client.indices.get_settings.return_value = testvars.settings_named
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo)
self.assertRaises(curator.SnapshotInProgress, ro.do_action)
def test_do_action_success_no_wfc(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
client.snapshot.status.return_value = testvars.nosnap_running
client.snapshot.verify_repository.return_value = testvars.verified_nodes
client.indices.get_settings.return_value = testvars.settings_named
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo, wait_for_completion=False)
self.assertIsNone(ro.do_action())
def test_do_action_report_on_failure(self):
client = Mock()
client.snapshot.get.return_value = testvars.snapshots
client.snapshot.get_repository.return_value = testvars.test_repo
client.snapshot.status.return_value = testvars.nosnap_running
client.snapshot.verify_repository.return_value = testvars.verified_nodes
client.indices.get_settings.return_value = testvars.settings_named
client.snapshot.restore.side_effect = testvars.fake_fail
slo = curator.SnapshotList(client, repository=testvars.repo_name)
ro = curator.Restore(slo)
self.assertRaises(curator.FailedExecution, ro.do_action)
| 52.726115
| 80
| 0.702344
| 970
| 8,278
| 5.763918
| 0.121649
| 0.098372
| 0.159721
| 0.103738
| 0.812914
| 0.795028
| 0.795028
| 0.790556
| 0.782686
| 0.75228
| 0
| 0.009509
| 0.199686
| 8,278
| 156
| 81
| 53.064103
| 0.834415
| 0.006403
| 0
| 0.62987
| 0
| 0
| 0.038794
| 0
| 0
| 0
| 0
| 0
| 0.11039
| 1
| 0.11039
| false
| 0
| 0.032468
| 0
| 0.149351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6f34037d65f6c6132ec472bd26d3efe94f6fa07b
| 356
|
py
|
Python
|
ee/clickhouse/migrations/0011_cohortpeople_no_shard.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 1
|
2021-07-28T19:44:48.000Z
|
2021-07-28T19:44:48.000Z
|
ee/clickhouse/migrations/0011_cohortpeople_no_shard.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 15
|
2021-11-09T10:49:34.000Z
|
2021-11-09T16:11:01.000Z
|
ee/clickhouse/migrations/0011_cohortpeople_no_shard.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 1
|
2021-09-08T19:45:03.000Z
|
2021-09-08T19:45:03.000Z
|
from infi.clickhouse_orm import migrations
from ee.clickhouse.sql.cohort import CREATE_COHORTPEOPLE_TABLE_SQL, DROP_COHORTPEOPLE_TABLE_SQL
from posthog.settings import CLICKHOUSE_REPLICATION
# run create table again with proper configuration
operations = [migrations.RunSQL(DROP_COHORTPEOPLE_TABLE_SQL), migrations.RunSQL(CREATE_COHORTPEOPLE_TABLE_SQL)]
| 44.5
| 111
| 0.876404
| 46
| 356
| 6.478261
| 0.5
| 0.228188
| 0.268456
| 0.174497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075843
| 356
| 7
| 112
| 50.857143
| 0.905775
| 0.134831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
48939c02e3779e0df066755c652e15113cb79b57
| 39
|
py
|
Python
|
cls/p3.py
|
sanchez0623/zsq.LearningPython
|
419df031a2a905fe7d7c2dfe14aa2f8729989a9a
|
[
"Apache-2.0"
] | null | null | null |
cls/p3.py
|
sanchez0623/zsq.LearningPython
|
419df031a2a905fe7d7c2dfe14aa2f8729989a9a
|
[
"Apache-2.0"
] | null | null | null |
cls/p3.py
|
sanchez0623/zsq.LearningPython
|
419df031a2a905fe7d7c2dfe14aa2f8729989a9a
|
[
"Apache-2.0"
] | null | null | null |
from sub.p11 import a
# The package's __init__ file is executed first
| 13
| 21
| 0.794872
| 8
| 39
| 3.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.153846
| 39
| 3
| 22
| 13
| 0.757576
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
48a89ae9cf100c8b0962a1a7a5b9d18a8c38ea45
| 220
|
py
|
Python
|
cluster_tools/__init__.py
|
ninastijepovic/MasterThesis
|
2579f1e74c0ce404f350a6d441e273b6aef4eadc
|
[
"MIT"
] | null | null | null |
cluster_tools/__init__.py
|
ninastijepovic/MasterThesis
|
2579f1e74c0ce404f350a6d441e273b6aef4eadc
|
[
"MIT"
] | null | null | null |
cluster_tools/__init__.py
|
ninastijepovic/MasterThesis
|
2579f1e74c0ce404f350a6d441e273b6aef4eadc
|
[
"MIT"
] | null | null | null |
# INIT FILE
from .ObservationDao import *
from .autoencoder import *
from .autoencoderpreprocessor import *
from .distanceCalculator import *
from .embedder import *
from .locationwarping import *
from .utils import *
| 22
| 38
| 0.781818
| 23
| 220
| 7.478261
| 0.478261
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 220
| 9
| 39
| 24.444444
| 0.919786
| 0.040909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d2aac76f45c513b4c778c75058f76b377904348f
| 21
|
py
|
Python
|
nlp4j_wrapper/__init__.py
|
justinsowhat/nlp4j-python-wrapper
|
5bcfd32dedf27043a816fbe918b4863a49050a7a
|
[
"MIT"
] | null | null | null |
nlp4j_wrapper/__init__.py
|
justinsowhat/nlp4j-python-wrapper
|
5bcfd32dedf27043a816fbe918b4863a49050a7a
|
[
"MIT"
] | 1
|
2019-10-06T14:26:35.000Z
|
2019-10-10T18:02:09.000Z
|
nlp4j_wrapper/__init__.py
|
justinsowhat/nlp4j-python-wrapper
|
5bcfd32dedf27043a816fbe918b4863a49050a7a
|
[
"MIT"
] | 1
|
2017-07-24T13:02:09.000Z
|
2017-07-24T13:02:09.000Z
|
from .nlp4j import *
| 10.5
| 20
| 0.714286
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.190476
| 21
| 1
| 21
| 21
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d2ba6317cb065f775e269d485cbb878f121587da
| 50
|
py
|
Python
|
models/__init__.py
|
Anonymous-926/SM-SGE
|
4a58c2dfcae6301ae5af17a59ecdbe80fbb41522
|
[
"MIT"
] | 4
|
2021-07-06T08:27:42.000Z
|
2021-12-25T06:30:08.000Z
|
models/__init__.py
|
Anonymous-926/SM-SGE
|
4a58c2dfcae6301ae5af17a59ecdbe80fbb41522
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
Anonymous-926/SM-SGE
|
4a58c2dfcae6301ae5af17a59ecdbe80fbb41522
|
[
"MIT"
] | 1
|
2021-04-15T10:50:06.000Z
|
2021-04-15T10:50:06.000Z
|
from .gat import MGRN_S
from .sp_gat import SpGAT
| 16.666667
| 25
| 0.8
| 10
| 50
| 3.8
| 0.7
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 50
| 2
| 26
| 25
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d2e4e62bd0363c8bca998f3ae6348ed744ee9954
| 215
|
py
|
Python
|
library/rpi_inky_layout/__init__.py
|
DoctorU/rpi-inky-layout
|
1b46cdbee3a07c99e254cce297a1b408a347cfb2
|
[
"MIT"
] | 1
|
2021-03-05T12:54:23.000Z
|
2021-03-05T12:54:23.000Z
|
library/rpi_inky_layout/__init__.py
|
DoctorU/rpi-inky-layout
|
1b46cdbee3a07c99e254cce297a1b408a347cfb2
|
[
"MIT"
] | 30
|
2021-01-16T10:09:46.000Z
|
2021-05-01T13:47:48.000Z
|
library/rpi_inky_layout/__init__.py
|
DoctorU/rpi-inky-layout
|
1b46cdbee3a07c99e254cce297a1b408a347cfb2
|
[
"MIT"
] | null | null | null |
__name__ = 'rpi_inky_layout'
from .layout import Layout # noqa: F401
from .rotation import Rotation # noqa: F401
from .position import Position # noqa: F401
from .index_order import IndexOrder # noqa: F401
| 35.833333
| 50
| 0.744186
| 29
| 215
| 5.275862
| 0.448276
| 0.20915
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068571
| 0.186047
| 215
| 5
| 51
| 43
| 0.805714
| 0.2
| 0
| 0
| 0
| 0
| 0.092593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d2f06e8d78a1e4f1413da333583d5a89ded9a0bc
| 4,268
|
py
|
Python
|
experiments/half_cheetah/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/half_cheetah/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
experiments/half_cheetah/process_result.py
|
michalnand/reinforcement_learning_im
|
e29caa2a0b7bca3f9ff45ed949a3d3df3a40c4c1
|
[
"MIT"
] | null | null | null |
import sys
sys.path.insert(0, '../../')
from libs_common.RLStatsCompute import *
import matplotlib.pyplot as plt
result_path = "./results/"
files = []
files.append("./models/ddpg_baseline/run_0/result/result.log")
files.append("./models/ddpg_baseline/run_1/result/result.log")
files.append("./models/ddpg_baseline/run_2/result/result.log")
files.append("./models/ddpg_baseline/run_3/result/result.log")
files.append("./models/ddpg_baseline/run_4/result/result.log")
files.append("./models/ddpg_baseline/run_5/result/result.log")
files.append("./models/ddpg_baseline/run_6/result/result.log")
files.append("./models/ddpg_baseline/run_7/result/result.log")
rl_stats_compute_ddpg = RLStatsCompute(files, result_path + "ddpg_baseline.log")
files = []
files.append("./models/ddpg_curiosity/run_0/result/result.log")
files.append("./models/ddpg_curiosity/run_1/result/result.log")
files.append("./models/ddpg_curiosity/run_2/result/result.log")
files.append("./models/ddpg_curiosity/run_3/result/result.log")
files.append("./models/ddpg_curiosity/run_4/result/result.log")
files.append("./models/ddpg_curiosity/run_5/result/result.log")
files.append("./models/ddpg_curiosity/run_6/result/result.log")
files.append("./models/ddpg_curiosity/run_7/result/result.log")
rl_stats_compute_curiosity = RLStatsCompute(files, result_path + "ddpg_curiosity.log")
plt.cla()
plt.ylabel("score")
plt.xlabel("episode")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_ddpg.games_mean, rl_stats_compute_ddpg.episode_mean, label="ddpg baseline", color='blue')
plt.fill_between(rl_stats_compute_ddpg.games_mean, rl_stats_compute_ddpg.episode_lower, rl_stats_compute_ddpg.episode_upper, color='blue', alpha=0.2)
plt.plot(rl_stats_compute_curiosity.games_mean, rl_stats_compute_curiosity.episode_mean, label="ddpg curiosity", color='red')
plt.fill_between(rl_stats_compute_curiosity.games_mean, rl_stats_compute_curiosity.episode_lower, rl_stats_compute_curiosity.episode_upper, color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "score_per_episode.png", dpi = 300)
plt.cla()
plt.ylabel("score")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_ddpg.iterations, rl_stats_compute_ddpg.episode_mean, label="ddpg baseline", color='blue')
plt.fill_between(rl_stats_compute_ddpg.iterations, rl_stats_compute_ddpg.episode_lower, rl_stats_compute_ddpg.episode_upper, color='blue', alpha=0.2)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.episode_mean, label="ddpg curiosity", color='red')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.episode_lower, rl_stats_compute_curiosity.episode_upper, color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "score_per_iteration.png", dpi = 300)
'''
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.entropy_mean, label="entropy", color='orange')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.entropy_lower, rl_stats_compute_curiosity.entropy_upper, color='orange', alpha=0.2)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.curiosity_mean, label="curiosity", color='green')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.curiosity_lower, rl_stats_compute_curiosity.curiosity_upper, color='green', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "internal_motivation.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.forward_loss_mean, label="forward model loss", color='navy')
plt.fill_between(rl_stats_compute_curiosity.iterations, rl_stats_compute_curiosity.forward_loss_lower, rl_stats_compute_curiosity.forward_loss_upper, color='navy', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "forward_model_loss.png", dpi = 300)
'''
| 44.458333
| 174
| 0.803889
| 641
| 4,268
| 5.048362
| 0.117005
| 0.080037
| 0.160074
| 0.184796
| 0.895859
| 0.848578
| 0.819839
| 0.801916
| 0.783375
| 0.571693
| 0
| 0.01356
| 0.049672
| 4,268
| 96
| 175
| 44.458333
| 0.78427
| 0
| 0
| 0.222222
| 0
| 0
| 0.336189
| 0.270048
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9611a2a7903a36b684cd3df3c0d7e05c4654ea51
| 6,549
|
py
|
Python
|
aisimplekit/features/stats.py
|
greyzor/kaggle-codebase
|
ce7226598b69b759f571bc9e54b5fd7acf282c6e
|
[
"MIT"
] | null | null | null |
aisimplekit/features/stats.py
|
greyzor/kaggle-codebase
|
ce7226598b69b759f571bc9e54b5fd7acf282c6e
|
[
"MIT"
] | null | null | null |
aisimplekit/features/stats.py
|
greyzor/kaggle-codebase
|
ce7226598b69b759f571bc9e54b5fd7acf282c6e
|
[
"MIT"
] | null | null | null |
"""
Some statistics extraction helpers.
"""
import gc
def do_count(df, group_cols, agg_name, agg_type='uint32', show_max=False, show_agg=True):
"""Count occurences on groups of columns.
:param df: input dataframe.
:type df: pd.DataFrame
:param group_cols: the columns we want to group on.
:type group_cols: list
:param agg_name: aggregation column name
:type agg_name: str
:param agg_type: type of new aggregated column
:type agg_type: str
:param show_max: debug option to show max value of aggregated column
:type show_max: bool
:param show_agg: debug option to show info on aggregation
:type show_agg: bool
:returns: the dataframe with the new aggregated column
:rtype: pd.DataFrame
"""
if show_agg:
print( "Aggregating by ", group_cols , '...' )
prev_idx = df.index
gp = df[group_cols][group_cols].groupby(group_cols).size().rename(agg_name).to_frame().reset_index()
df = df.merge(gp, on=group_cols, how='left', left_index=True)
df.index = prev_idx
del(gp)
if show_max:
print( agg_name + " max value = ", df[agg_name].max() )
df[agg_name] = df[agg_name].astype(agg_type)
gc.collect()
return df
def do_countuniq(df, group_cols, counted, agg_name, agg_type='uint32', show_max=False, show_agg=True):
"""Count unique occurences of a column after grouping by other columns.
:param df: input dataframe.
:type df: pd.DataFrame
:param group_cols: the columns we want to group on.
:type group_cols: list
:param counted: the column being uniquely counted
:type counted: str
:param agg_name: aggregation column name
:type agg_name: str
:param agg_type: type of new aggregated column
:type agg_type: str
:param show_max: debug option to show max value of aggregated column
:type show_max: bool
:param show_agg: debug option to show info on aggregation
:type show_agg: bool
:returns: the dataframe with the new aggregated column
:rtype: pd.DataFrame
"""
if show_agg:
print( "Counting unique ", counted, " by ", group_cols , '...' )
prev_idx = df.index
gp = df[group_cols+[counted]].groupby(group_cols)[counted].nunique().reset_index().rename(columns={counted:agg_name})
df = df.merge(gp, on=group_cols, how='left', left_index=True)
df.index = prev_idx
del(gp)
if show_max:
print( agg_name + " max value = ", df[agg_name].max() )
df[agg_name] = df[agg_name].astype(agg_type)
gc.collect()
return df
def do_cumcount(df, group_cols, counted, agg_name, agg_type='uint32', show_max=False, show_agg=True):
"""Cumulatively count a column after grouping by other columns.
:param df: input dataframe.
:type df: pd.DataFrame
:param group_cols: the columns we want to group on.
:type group_cols: list
:param counted: the column being cumulatively counted
:type counted: str
:param agg_name: aggregation column name
:type agg_name: str
:param agg_type: type of new aggregated column
:type agg_type: str
:param show_max: debug option to show max value of aggregated column
:type show_max: bool
:param show_agg: debug option to show info on aggregation
:type show_agg: bool
:returns: the dataframe with the new aggregated column
:rtype: pd.DataFrame
"""
if show_agg:
print( "Cumulative count by ", group_cols , '...' )
gp = df[group_cols+[counted]].groupby(group_cols)[counted].cumcount()
df[agg_name] = gp.values
del(gp)
if show_max:
print( agg_name + " max value = ", df[agg_name].max() )
df[agg_name] = df[agg_name].astype(agg_type)
gc.collect()
return df
def do_mean(df, group_cols, counted, agg_name, agg_type='float32', show_max=False, show_agg=True):
"""Compute mean of a column values after grouping by other columns.
:param df: input dataframe.
:type df: pd.DataFrame
:param group_cols: the columns we want to group on.
:type group_cols: list
:param counted: the column for which we want to compute the average of values
:type counted: str
:param agg_name: aggregation column name
:type agg_name: str
:param agg_type: type of new aggregated column
:type agg_type: str
:param show_max: debug option to show max value of aggregated column
:type show_max: bool
:param show_agg: debug option to show info on aggregation
:type show_agg: bool
:returns: the dataframe with the new aggregated column
:rtype: pd.DataFrame
"""
if show_agg:
print( "Calculating mean of ", counted, " by ", group_cols , '...' )
prev_idx = df.index
gp = df[group_cols+[counted]].groupby(group_cols)[counted].mean().reset_index().rename(columns={counted:agg_name})
df = df.merge(gp, on=group_cols, how='left', left_index=True)
df.index = prev_idx
del(gp)
if show_max:
print( agg_name + " max value = ", df[agg_name].max() )
df[agg_name] = df[agg_name].astype(agg_type)
gc.collect()
return df
def do_var(df, group_cols, counted, agg_name, agg_type='float32', show_max=False, show_agg=True):
"""Compute variance of a column values after grouping by other columns.
:param df: input dataframe.
:type df: pd.DataFrame
:param group_cols: the columns we want to group on.
:type group_cols: list
:param counted: the column for which we want to compute the variance of values
:type counted: str
:param agg_name: aggregation column name
:type agg_name: str
:param agg_type: type of new aggregated column
:type agg_type: str
:param show_max: debug option to show max value of aggregated column
:type show_max: bool
:param show_agg: debug option to show info on aggregation
:type show_agg: bool
:returns: the dataframe with the new aggregated column
:rtype: pd.DataFrame
"""
if show_agg:
print( "Calculating variance of ", counted, " by ", group_cols , '...' )
prev_idx = df.index
gp = df[group_cols+[counted]].groupby(group_cols)[counted].var().reset_index().rename(columns={counted:agg_name})
df = df.merge(gp, on=group_cols, how='left', left_index=True)
df.index = prev_idx
del(gp)
if show_max:
print( agg_name + " max value = ", df[agg_name].max() )
df[agg_name] = df[agg_name].astype(agg_type)
gc.collect()
return df
# List of exposed symbols
__all__ = [
'do_count',
'do_countuniq',
'do_cumcount',
'do_mean',
'do_var'
]
| 34.650794
| 121
| 0.676592
| 989
| 6,549
| 4.312437
| 0.095046
| 0.065651
| 0.033763
| 0.039859
| 0.904103
| 0.904103
| 0.904103
| 0.904103
| 0.904103
| 0.894021
| 0
| 0.001956
| 0.219423
| 6,549
| 189
| 122
| 34.650794
| 0.832355
| 0.490915
| 0
| 0.661972
| 0
| 0
| 0.094098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070423
| false
| 0
| 0.014085
| 0
| 0.15493
| 0.140845
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
96161d432876641b21eabf32189ba8c19ab86953
| 106
|
py
|
Python
|
10/02/1/call.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
10/02/1/call.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | 70
|
2017-06-01T11:02:51.000Z
|
2017-06-30T00:35:32.000Z
|
10/02/1/call.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
import package1.module
import package2.module
package1.module.some_method()
package2.module.some_method()
| 21.2
| 29
| 0.849057
| 14
| 106
| 6.285714
| 0.428571
| 0.318182
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.056604
| 106
| 4
| 30
| 26.5
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
824c1dc712647a9fd06f14f78e86a5a2c789c3f2
| 29,582
|
py
|
Python
|
kongmingclient/tests/unit/common/test_http.py
|
zhengzhenyu/python-kongmingclient
|
31bac4ff1206c61fc1625665bdddd5799c3bae4c
|
[
"Apache-2.0"
] | null | null | null |
kongmingclient/tests/unit/common/test_http.py
|
zhengzhenyu/python-kongmingclient
|
31bac4ff1206c61fc1625665bdddd5799c3bae4c
|
[
"Apache-2.0"
] | null | null | null |
kongmingclient/tests/unit/common/test_http.py
|
zhengzhenyu/python-kongmingclient
|
31bac4ff1206c61fc1625665bdddd5799c3bae4c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Huawei, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import socket
from keystoneauth1 import adapter
import mock
from osc_lib.tests import fakes as osc_fakes
from oslo_serialization import jsonutils
import six
from moganclient.common import exceptions as exc
from moganclient.common import http
from moganclient.common import utils
from moganclient.tests.unit import base
from moganclient.tests.unit import fakes
@mock.patch('moganclient.common.http.requests.request')
class TestHttpClient(base.TestBase):
def setUp(self):
super(TestHttpClient, self).setUp()
def test_http_raw_request(self, mock_request):
headers = {'User-Agent': 'python-moganclient',
'Content-Type': 'application/octet-stream'}
mock_request.return_value = fakes.FakeHTTPResponse(200, 'OK', {}, '')
client = http.HTTPClient('http://example.com:6688')
resp, body = client.raw_request('GET', '/prefix')
self.assertEqual(200, resp.status_code)
self.assertEqual('', ''.join([x for x in resp.content]))
mock_request.assert_called_once_with('GET',
'http://example.com:6688/prefix',
allow_redirects=False,
headers=headers)
def test_token_or_credentials(self, mock_request):
# Record a 200
fake200 = fakes.FakeHTTPResponse(200, 'OK', {}, '')
mock_request.side_effect = [fake200, fake200, fake200]
# Replay, create client, assert
client = http.HTTPClient('http://example.com:6688')
resp, body = client.raw_request('GET', '')
self.assertEqual(200, resp.status_code)
client.username = osc_fakes.USERNAME
client.password = osc_fakes.PASSWORD
resp, body = client.raw_request('GET', '')
self.assertEqual(200, resp.status_code)
client.auth_token = osc_fakes.AUTH_TOKEN
resp, body = client.raw_request('GET', '')
self.assertEqual(200, resp.status_code)
# no token or credentials
mock_request.assert_has_calls([
mock.call('GET', 'http://example.com:6688',
allow_redirects=False,
headers={'User-Agent': 'python-moganclient',
'Content-Type': 'application/octet-stream'}),
mock.call('GET', 'http://example.com:6688',
allow_redirects=False,
headers={'User-Agent': 'python-moganclient',
'X-Auth-Key': osc_fakes.PASSWORD,
'X-Auth-User': osc_fakes.USERNAME,
'Content-Type': 'application/octet-stream'}),
mock.call('GET', 'http://example.com:6688',
allow_redirects=False,
headers={'User-Agent': 'python-moganclient',
'X-Auth-Token': osc_fakes.AUTH_TOKEN,
'Content-Type': 'application/octet-stream'})
])
def test_region_name(self, mock_request):
# Record a 200
fake200 = fakes.FakeHTTPResponse(200, 'OK', {}, '')
mock_request.return_value = fake200
client = http.HTTPClient('http://example.com:6688')
client.region_name = osc_fakes.REGION_NAME
resp, body = client.raw_request('GET', '')
self.assertEqual(200, resp.status_code)
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'X-Region-Name': osc_fakes.REGION_NAME,
'User-Agent': 'python-moganclient',
'Content-Type': 'application/octet-stream'})
def test_http_json_request(self, mock_request):
# Record a 200
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
client = http.HTTPClient('http://example.com:6688')
resp, body = client.json_request('GET', '')
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_json_request_argument_passed_to_requests(self, mock_request):
"""Check that we have sent the proper arguments to requests."""
# Record a 200
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
client = http.HTTPClient('http://example.com:6688')
client.verify_cert = True
client.cert_file = 'RANDOM_CERT_FILE'
client.key_file = 'RANDOM_KEY_FILE'
client.auth_url = osc_fakes.AUTH_URL
resp, body = client.json_request('POST', '', data='text')
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
mock_request.assert_called_once_with(
'POST', 'http://example.com:6688',
allow_redirects=False,
cert=('RANDOM_CERT_FILE', 'RANDOM_KEY_FILE'),
verify=True,
data='"text"',
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'X-Auth-Url': osc_fakes.AUTH_URL,
'User-Agent': 'python-moganclient'})
def test_http_json_request_w_req_body(self, mock_request):
# Record a 200
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
client = http.HTTPClient('http://example.com:6688')
resp, body = client.json_request('POST', '', data='test-body')
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
mock_request.assert_called_once_with(
'POST', 'http://example.com:6688',
data='"test-body"',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_json_request_non_json_resp_cont_type(self, mock_request):
# Record a 200
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'not/json'}, '{}')
client = http.HTTPClient('http://example.com:6688')
resp, body = client.json_request('POST', '', data='test-data')
self.assertEqual(200, resp.status_code)
self.assertIsNone(body)
mock_request.assert_called_once_with(
'POST', 'http://example.com:6688', data='"test-data"',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_json_request_invalid_json(self, mock_request):
# Record a 200
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, 'invalid-json')
client = http.HTTPClient('http://example.com:6688')
resp, body = client.json_request('GET', '')
self.assertEqual(200, resp.status_code)
self.assertEqual('invalid-json', body)
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_json_request_redirect_delete(self, mock_request):
mock_request.side_effect = [
fakes.FakeHTTPResponse(
302, 'Found',
{'location': 'http://example.com:6688/foo/bar'},
''),
fakes.FakeHTTPResponse(
200, 'OK',
{'Content-Type': 'application/json'},
'{}')]
client = http.HTTPClient('http://example.com:6688/foo')
resp, body = client.json_request('DELETE', '')
self.assertEqual(200, resp.status_code)
mock_request.assert_has_calls([
mock.call('DELETE', 'http://example.com:6688/foo',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}),
mock.call('DELETE', 'http://example.com:6688/foo/bar',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
])
def test_http_json_request_redirect_post(self, mock_request):
mock_request.side_effect = [
fakes.FakeHTTPResponse(
302, 'Found',
{'location': 'http://example.com:6688/foo/bar'},
''),
fakes.FakeHTTPResponse(
200, 'OK',
{'Content-Type': 'application/json'},
'{}')]
client = http.HTTPClient('http://example.com:6688/foo')
resp, body = client.json_request('POST', '')
self.assertEqual(200, resp.status_code)
mock_request.assert_has_calls([
mock.call('POST', 'http://example.com:6688/foo',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}),
mock.call('POST', 'http://example.com:6688/foo/bar',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
])
def test_http_json_request_redirect_put(self, mock_request):
mock_request.side_effect = [
fakes.FakeHTTPResponse(
302, 'Found',
{'location': 'http://example.com:6688/foo/bar'},
''),
fakes.FakeHTTPResponse(
200, 'OK',
{'Content-Type': 'application/json'},
'{}')]
client = http.HTTPClient('http://example.com:6688/foo')
resp, body = client.json_request('PUT', '')
self.assertEqual(200, resp.status_code)
mock_request.assert_has_calls([
mock.call('PUT', 'http://example.com:6688/foo',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}),
mock.call('PUT', 'http://example.com:6688/foo/bar',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
])
def test_http_json_request_redirect_diff_location(self, mock_request):
mock_request.side_effect = [
fakes.FakeHTTPResponse(
302, 'Found',
{'location': 'http://example.com:6688/diff_lcation'},
''),
fakes.FakeHTTPResponse(
200, 'OK',
{'Content-Type': 'application/json'},
'{}')]
client = http.HTTPClient('http://example.com:6688/foo')
resp, body = client.json_request('PUT', '')
self.assertEqual(200, resp.status_code)
mock_request.assert_has_calls([
mock.call('PUT', 'http://example.com:6688/foo',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}),
mock.call('PUT', 'http://example.com:6688/diff_lcation',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
])
def test_http_json_request_redirect_error_without_location(self,
mock_request):
mock_request.return_value = fakes.FakeHTTPResponse(
302, 'Found', {}, '')
client = http.HTTPClient('http://example.com:6688/foo')
self.assertRaises(exc.EndpointException,
client.json_request, 'DELETE', '')
mock_request.assert_called_once_with(
'DELETE', 'http://example.com:6688/foo',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_json_request_redirect_get(self, mock_request):
# Record the 302
mock_request.side_effect = [
fakes.FakeHTTPResponse(
302, 'Found',
{'location': 'http://example.com:6688'},
''),
fakes.FakeHTTPResponse(
200, 'OK',
{'Content-Type': 'application/json'},
'{}')]
client = http.HTTPClient('http://example.com:6688')
resp, body = client.json_request('GET', '')
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
mock_request.assert_has_calls([
mock.call('GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}),
mock.call('GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
])
def test_http_404_json_request(self, mock_request):
mock_request.return_value = fakes.FakeHTTPResponse(
404, 'Not Found', {'Content-Type': 'application/json'}, '')
client = http.HTTPClient('http://example.com:6688')
e = self.assertRaises(exc.NotFound, client.json_request, 'GET', '')
# Assert that the raised exception can be converted to string
self.assertIsNotNone(str(e))
# Record a 404
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_http_300_json_request(self, mock_request):
mock_request.return_value = fakes.FakeHTTPResponse(
300, 'OK', {'Content-Type': 'application/json'}, '')
client = http.HTTPClient('http://example.com:6688')
e = self.assertRaises(
exc.MultipleChoices, client.json_request, 'GET', '')
# Assert that the raised exception can be converted to string
self.assertIsNotNone(str(e))
# Record a 300
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'})
def test_fake_json_request(self, mock_request):
headers = {'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}
mock_request.side_effect = [socket.gaierror]
client = http.HTTPClient('fake://example.com:6688')
self.assertRaises(exc.EndpointNotFound,
client.json_request, "GET", "/")
mock_request.assert_called_once_with('GET', 'fake://example.com:6688/',
allow_redirects=False,
headers=headers)
def test_http_request_socket_error(self, mock_request):
headers = {'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}
mock_request.side_effect = [socket.error]
client = http.HTTPClient('http://example.com:6688')
self.assertRaises(exc.ConnectionError,
client.json_request, "GET", "/")
mock_request.assert_called_once_with('GET', 'http://example.com:6688/',
allow_redirects=False,
headers=headers)
def test_http_request_socket_timeout(self, mock_request):
headers = {'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'}
mock_request.side_effect = [socket.timeout]
client = http.HTTPClient('http://example.com:6688')
self.assertRaises(exc.ConnectionError,
client.json_request, "GET", "/")
mock_request.assert_called_once_with('GET', 'http://example.com:6688/',
allow_redirects=False,
headers=headers)
def test_http_request_specify_timeout(self, mock_request):
mock_request.return_value = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
client = http.HTTPClient('http://example.com:6688', timeout='123')
resp, body = client.json_request('GET', '')
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
mock_request.assert_called_once_with(
'GET', 'http://example.com:6688',
allow_redirects=False,
headers={'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'python-moganclient'},
timeout=float(123))
def test_get_system_ca_file(self, mock_request):
chosen = '/etc/ssl/certs/ca-certificates.crt'
with mock.patch('os.path.exists') as mock_os:
mock_os.return_value = chosen
ca = http.get_system_ca_file()
self.assertEqual(chosen, ca)
mock_os.assert_called_once_with(chosen)
def test_insecure_verify_cert_none(self, mock_request):
client = http.HTTPClient('https://foo', insecure=True)
self.assertFalse(client.verify_cert)
def test_passed_cert_to_verify_cert(self, mock_request):
client = http.HTTPClient('https://foo', ca_file="NOWHERE")
self.assertEqual("NOWHERE", client.verify_cert)
with mock.patch('moganclient.common.http.get_system_ca_file') as gsf:
gsf.return_value = "SOMEWHERE"
client = http.HTTPClient('https://foo')
self.assertEqual("SOMEWHERE", client.verify_cert)
def test_methods(self, mock_request):
fake = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
mock_request.return_value = fake
client = http.HTTPClient('http://example.com:6688')
methods = [client.get, client.put, client.post, client.patch,
client.delete, client.head]
for method in methods:
resp, body = method('')
self.assertEqual(200, resp.status_code)
class TestSessionClient(base.TestBase):
def setUp(self):
super(TestSessionClient, self).setUp()
self.request = mock.patch.object(adapter.LegacyJsonAdapter,
'request').start()
def test_session_simple_request(self):
resp = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/octet-stream'}, '{}')
self.request.return_value = (resp, {})
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
resp, body = client.request(method='GET', url='')
self.assertEqual(200, resp.status_code)
self.assertEqual('{}', ''.join([x for x in resp.content]))
self.assertEqual({}, body)
def test_session_json_request(self):
fake = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'},
jsonutils.dumps({'some': 'body'}))
self.request.return_value = (fake, {'some': 'body'})
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
resp, body = client.request('', 'GET')
self.assertEqual(200, resp.status_code)
self.assertEqual({'some': 'body'}, resp.json())
self.assertEqual({'some': 'body'}, body)
def test_404_error_response(self):
fake = fakes.FakeHTTPResponse(
404, 'Not Found', {'Content-Type': 'application/json'}, '')
self.request.return_value = (fake, '')
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
e = self.assertRaises(exc.NotFound,
client.request, '', 'GET')
# Assert that the raised exception can be converted to string
self.assertIsNotNone(six.text_type(e))
def test_redirect_302_location(self):
fake1 = fakes.FakeHTTPResponse(
302, 'OK', {'location': 'http://no.where/ishere'}, '')
fake2 = fakes.FakeHTTPResponse(200, 'OK',
{'Content-Type': 'application/json'},
jsonutils.dumps({'Mount': 'Fuji'}))
self.request.side_effect = [
(fake1, None), (fake2, {'Mount': 'Fuji'})]
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY,
endpoint_override='http://no.where/')
resp, body = client.request('', 'GET', redirect=True)
self.assertEqual(200, resp.status_code)
self.assertEqual({'Mount': 'Fuji'}, utils.get_response_body(resp))
self.assertEqual({'Mount': 'Fuji'}, body)
self.assertEqual(('', 'GET'), self.request.call_args_list[0][0])
self.assertEqual(('ishere', 'GET'), self.request.call_args_list[1][0])
for call in self.request.call_args_list:
self.assertEqual({'user_agent': 'python-moganclient',
'raise_exc': False,
'redirect': True}, call[1])
def test_302_location_not_override(self):
fake1 = fakes.FakeHTTPResponse(
302, 'OK', {'location': 'http://no.where/ishere'}, '')
fake2 = fakes.FakeHTTPResponse(200, 'OK',
{'Content-Type': 'application/json'},
jsonutils.dumps({'Mount': 'Fuji'}))
self.request.side_effect = [
(fake1, None), (fake2, {'Mount': 'Fuji'})]
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY,
endpoint_override='http://endpoint/')
resp, body = client.request('', 'GET', redirect=True)
self.assertEqual(200, resp.status_code)
self.assertEqual({'Mount': 'Fuji'}, utils.get_response_body(resp))
self.assertEqual({'Mount': 'Fuji'}, body)
self.assertEqual(('', 'GET'), self.request.call_args_list[0][0])
self.assertEqual(('http://no.where/ishere',
'GET'), self.request.call_args_list[1][0])
for call in self.request.call_args_list:
self.assertEqual({'user_agent': 'python-moganclient',
'raise_exc': False,
'redirect': True}, call[1])
def test_redirect_302_no_location(self):
fake = fakes.FakeHTTPResponse(
302, 'OK', {}, '')
self.request.side_effect = [(fake, '')]
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
e = self.assertRaises(exc.EndpointException,
client.request, '', 'GET', redirect=True)
self.assertEqual("Location not returned with redirect",
six.text_type(e))
def test_no_redirect_302_no_location(self):
fake = fakes.FakeHTTPResponse(302, 'OK',
{'location': 'http://no.where/ishere'},
'')
self.request.side_effect = [(fake, '')]
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
resp, body = client.request('', 'GET')
self.assertEqual(fake, resp)
def test_300_error_response(self):
fake = fakes.FakeHTTPResponse(
300, 'FAIL', {'Content-Type': 'application/octet-stream'}, '')
self.request.return_value = (fake, '')
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
e = self.assertRaises(exc.MultipleChoices,
client.request, '', 'GET')
# Assert that the raised exception can be converted to string
self.assertIsNotNone(six.text_type(e))
def test_506_error_response(self):
# for 506 we don't have a specific exception type
fake = fakes.FakeHTTPResponse(
506, 'FAIL', {'Content-Type': 'application/octet-stream'}, '')
self.request.return_value = (fake, '')
client = http.SessionClient(session=mock.ANY,
auth=mock.ANY)
e = self.assertRaises(exc.HttpServerError,
client.request, '', 'GET')
self.assertEqual(506, e.status_code)
def test_kwargs(self):
fake = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
kwargs = dict(endpoint_override='http://no.where/',
data='some_data')
client = http.SessionClient(mock.ANY)
self.request.return_value = (fake, {})
resp, body = client.request('', 'GET', **kwargs)
self.assertEqual({'endpoint_override': 'http://no.where/',
'json': 'some_data',
'user_agent': 'python-moganclient',
'raise_exc': False}, self.request.call_args[1])
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
self.assertEqual({}, utils.get_response_body(resp))
@mock.patch.object(jsonutils, 'dumps')
def test_kwargs_with_files(self, mock_dumps):
fake = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
mock_dumps.return_value = "{'files': test}}"
data = six.BytesIO(b'test')
kwargs = {'endpoint_override': 'http://no.where/',
'data': {'files': data}}
client = http.SessionClient(mock.ANY)
self.request.return_value = (fake, {})
resp, body = client.request('', 'GET', **kwargs)
self.assertEqual({'endpoint_override': 'http://no.where/',
'json': {'files': data},
'user_agent': 'python-moganclient',
'raise_exc': False}, self.request.call_args[1])
self.assertEqual(200, resp.status_code)
self.assertEqual({}, body)
self.assertEqual({}, utils.get_response_body(resp))
def test_methods(self):
fake = fakes.FakeHTTPResponse(
200, 'OK', {'Content-Type': 'application/json'}, '{}')
self.request.return_value = (fake, {})
client = http.SessionClient(mock.ANY)
methods = [client.get, client.put, client.post, client.patch,
client.delete, client.head]
for method in methods:
resp, body = method('')
self.assertEqual(200, resp.status_code)
def test_credentials_headers(self):
client = http.SessionClient(mock.ANY)
self.assertEqual({}, client.credentials_headers())
| 43.502941
| 79
| 0.554594
| 2,958
| 29,582
| 5.387762
| 0.089249
| 0.044864
| 0.046558
| 0.057602
| 0.834787
| 0.807868
| 0.772856
| 0.755161
| 0.737027
| 0.718015
| 0
| 0.025098
| 0.311744
| 29,582
| 679
| 80
| 43.56701
| 0.757662
| 0.037928
| 0
| 0.683241
| 0
| 0
| 0.193674
| 0.012489
| 0
| 0
| 0
| 0
| 0.174954
| 1
| 0.071823
| false
| 0.007366
| 0.020258
| 0
| 0.095764
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
82822bb3347722d6666add2853b5349ee1389109
| 2,804
|
py
|
Python
|
tests/test_integration_unix.py
|
sezuan/aiospamc
|
9a3f24b785229f3abb2abc5e0e5f94f130233946
|
[
"MIT"
] | null | null | null |
tests/test_integration_unix.py
|
sezuan/aiospamc
|
9a3f24b785229f3abb2abc5e0e5f94f130233946
|
[
"MIT"
] | null | null | null |
tests/test_integration_unix.py
|
sezuan/aiospamc
|
9a3f24b785229f3abb2abc5e0e5f94f130233946
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
import pytest
import aiospamc
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_check(spamd, spam):
result = await aiospamc.check(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_headers(spamd, spam):
result = await aiospamc.headers(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_ping(spamd):
result = await aiospamc.ping(socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_process(spamd, spam):
result = await aiospamc.process(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_report(spamd, spam):
result = await aiospamc.report(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_report_if_spam(spamd, spam):
result = await aiospamc.report_if_spam(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_symbols(spamd, spam):
result = await aiospamc.symbols(spam, socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_tell(spamd, spam):
result = await aiospamc.tell(
message=spam, message_class="spam", socket_path=spamd["unix"]["socket"]
)
assert 0 == result.status_code
@pytest.mark.skipif(
sys.platform == "win32", reason="Unix sockets not supported on Windows"
)
@pytest.mark.integration
@pytest.mark.asyncio
async def test_message_without_newline(spamd):
result = await aiospamc.check(message=b"acb", socket_path=spamd["unix"]["socket"])
assert 0 == result.status_code
| 25.724771
| 86
| 0.725036
| 379
| 2,804
| 5.274406
| 0.126649
| 0.135068
| 0.072036
| 0.085543
| 0.88044
| 0.810405
| 0.776388
| 0.776388
| 0.776388
| 0.776388
| 0
| 0.011609
| 0.1398
| 2,804
| 108
| 87
| 25.962963
| 0.817164
| 0.007489
| 0
| 0.584416
| 0
| 0
| 0.17074
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 1
| 0
| false
| 0
| 0.038961
| 0
| 0.038961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
829c53ae31b2937a0b92aa65b5756f3e4cf95283
| 439
|
py
|
Python
|
src/controller/src/testastar.py
|
robot-gang/maze-runner
|
03249b17e8dd2b39acd3110addb19c8ffcc59820
|
[
"MIT"
] | null | null | null |
src/controller/src/testastar.py
|
robot-gang/maze-runner
|
03249b17e8dd2b39acd3110addb19c8ffcc59820
|
[
"MIT"
] | null | null | null |
src/controller/src/testastar.py
|
robot-gang/maze-runner
|
03249b17e8dd2b39acd3110addb19c8ffcc59820
|
[
"MIT"
] | null | null | null |
from astar import *
# map = [
# [0, 1, 1, 0, 0, 1],
# [0, 0, 1, 1, 0, 0],
# [0, 0, 0, 0, 1, 0],
# [1, 0, 0, 1, 1, 0],
# [1, 1, 0, 1, 0, 0],
# [0, 0, 0, 0, 0, 0]]
map = [[1, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 0, 1],
[1, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0],
[1, 1, 1, 1, 1, 1]]
start = (3, 2)
goal = (3, 5)
planner = Planner(map, start, goal)
path = planner.astar()
print(path)
path1 = planner.findPath()
print(path1)
| 19.086957
| 35
| 0.412301
| 92
| 439
| 1.967391
| 0.173913
| 0.265193
| 0.248619
| 0.243094
| 0.364641
| 0.353591
| 0.276243
| 0.088398
| 0
| 0
| 0
| 0.235294
| 0.302961
| 439
| 22
| 36
| 19.954545
| 0.356209
| 0.289294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.153846
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
829e2b61894c1b09d911638b3a9ae1d98d658ecd
| 76
|
py
|
Python
|
dbricks_setup/scope/__init__.py
|
SindreOsnes/dbricks_setup
|
8c71c6b33cee1b17aac23d2dca68d6ce602546c7
|
[
"MIT"
] | null | null | null |
dbricks_setup/scope/__init__.py
|
SindreOsnes/dbricks_setup
|
8c71c6b33cee1b17aac23d2dca68d6ce602546c7
|
[
"MIT"
] | null | null | null |
dbricks_setup/scope/__init__.py
|
SindreOsnes/dbricks_setup
|
8c71c6b33cee1b17aac23d2dca68d6ce602546c7
|
[
"MIT"
] | null | null | null |
from ._delete import delete_scope_cli
from ._update import update_scope_cli
| 25.333333
| 37
| 0.868421
| 12
| 76
| 5
| 0.5
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 2
| 38
| 38
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
82b941670c01da07cb847f1b20bdcf0068c1f6d2
| 7,589
|
py
|
Python
|
src/middleware/connection.py
|
PatricioIribarneCatella/TP-NBA-statistics
|
e5a9a67a56614c63e4761316403e8ba6181ba447
|
[
"MIT"
] | null | null | null |
src/middleware/connection.py
|
PatricioIribarneCatella/TP-NBA-statistics
|
e5a9a67a56614c63e4761316403e8ba6181ba447
|
[
"MIT"
] | null | null | null |
src/middleware/connection.py
|
PatricioIribarneCatella/TP-NBA-statistics
|
e5a9a67a56614c63e4761316403e8ba6181ba447
|
[
"MIT"
] | null | null | null |
import sys
import zmq
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))

class ReplicationSocket(object):

    def __init__(self, config):
        # Get the context and create the socket
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)

        # Bind the 'publisher' socket
        net_config = config["bind"]
        self.socket.bind("tcp://{}:{}".format(
            net_config["ip"],
            net_config["port"]))

        self.socket.setsockopt(zmq.LINGER, -1)

    def send(self, msg):
        self.socket.send_string(msg)

    def close(self):
        self.socket.close()


class SuscriberSocket(object):

    def __init__(self, config, topicids):
        # Get the context and create the socket
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.SUB)

        # Connect to publisher
        net_config = config["connect"]
        self.socket.connect("tcp://{}:{}".format(
            net_config["ip"],
            net_config["port"]))

        self.socket.setsockopt(zmq.LINGER, -1)

        # Set the suscriber topics
        for tid in topicids:
            self.socket.setsockopt_string(zmq.SUBSCRIBE, str(tid))

    def recv(self):
        return self.socket.recv_string()

    def close(self):
        self.socket.close()


class PusherSocket(object):

    def __init__(self):
        # Get the context and create the socket
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUSH)
        self.socket.setsockopt(zmq.LINGER, -1)

    def send(self, msg):
        self.socket.send_string(msg)

    def close(self):
        self.socket.close()


class DispatcherSocket(PusherSocket):

    def __init__(self, config):
        super(DispatcherSocket, self).__init__()

        # Bind the 'dispatcher'/'pusher' socket
        net_config = config["bind"]
        self.socket.bind("tcp://{}:{}".format(
            net_config["ip"],
            net_config["port"]))


class ProducerSocket(PusherSocket):

    def __init__(self, config):
        super(ProducerSocket, self).__init__()

        # Connect the 'dispatcher'/'pusher' socket
        net_config = config["connect"]
        self.socket.connect("tcp://{}:{}".format(
            net_config["ip"],
            net_config["port"]))


class GatherSocket(object):

    def __init__(self, config):
        # Get the context and create the socket
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PULL)

        # Bind the 'gather'/'puller' socket
        net_config = config["bind"]
        self.socket.bind("tcp://{}:{}".format(
            net_config["ip"],
            net_config["port"]))

        self.socket.setsockopt(zmq.LINGER, -1)

    def recv(self):
        return self.socket.recv_string()

    def close(self):
        self.socket.close()


class InputWorkerSocket(object):

    def __init__(self, config):
        # Get the context and create sockets
        self.context = zmq.Context()
        net_config = config["nodes"]

        # Channel to receive work
        net_to_dispatcher = net_config["dispatcher"]["connect"]
        self.work_socket = self.context.socket(zmq.PULL)
        self.work_socket.connect("tcp://{}:{}".format(
            net_to_dispatcher["ip"],
            net_to_dispatcher["port"]))
        self.work_socket.setsockopt(zmq.LINGER, -1)

        # Channel to receive stop signal
        net_to_signal = net_config["signal"]["connect"]
        self.control_socket = self.context.socket(zmq.SUB)
        self.control_socket.connect("tcp://{}:{}".format(
            net_to_signal["ip"],
            net_to_signal["port"]))
        self.control_socket.setsockopt_string(zmq.SUBSCRIBE, "")
        self.control_socket.setsockopt(zmq.LINGER, -1)

        # Channels to send processed work
        net_to_filters = net_config["filters"]
        self.sockets = []

        for net in net_to_filters:
            s = self.context.socket(zmq.PUSH)
            s.connect("tcp://{}:{}".format(net["ip"], net["port"]))
            s.setsockopt(zmq.LINGER, -1)
            self.sockets.append(s)

        self.poll_sockets = {
            "work": self.work_socket,
            "control": self.control_socket,
        }

        # Poller multiplexer
        self.poller = zmq.Poller()
        self.poller.register(self.work_socket, zmq.POLLIN)
        self.poller.register(self.control_socket, zmq.POLLIN)

    # Set the polling with a
    # time-out of 0.5 seconds
    def poll(self):
        return dict(self.poller.poll(500))

    def test(self, sockets, sock_name):
        s = self.poll_sockets[sock_name]
        return sockets.get(s) == zmq.POLLIN

    def recv(self, sockets, sock_name):
        return self.poll_sockets[sock_name].recv_string()

    def send(self, data):
        # Send to all waiting filters
        for s in self.sockets:
            s.send_string(data)

    def close(self):
        self.work_socket.close()
        self.control_socket.close()

        for s in self.sockets:
            s.close()


class WorkerSocket(object):

    def __init__(self, config):
        # Get the context and create sockets
        self.context = zmq.Context()
        net_config = config["nodes"]

        # Channel to receive work
        net_to_filter = net_config["filter"]["connect"]
        self.work_socket = self.context.socket(zmq.PULL)
        self.work_socket.connect("tcp://{}:{}".format(
            net_to_filter["ip"],
            net_to_filter["port"]))

        # Channel to receive stop signal
        net_to_signal = net_config["signal"]["connect"]
        self.control_socket = self.context.socket(zmq.SUB)
        self.control_socket.connect("tcp://{}:{}".format(
            net_to_signal["ip"],
            net_to_signal["port"]))
        self.control_socket.setsockopt_string(zmq.SUBSCRIBE, "")

        # Channel to send processed work
        net_to_proxy = net_config["proxy"]["connect"]
        self.join_socket = self.context.socket(zmq.PUSH)
        self.join_socket.connect("tcp://{}:{}".format(
            net_to_proxy["ip"],
            net_to_proxy["port"]))

        self.poll_sockets = {
            "work": self.work_socket,
            "control": self.control_socket,
            "join": self.join_socket
        }

        # Poller multiplexer
        self.poller = zmq.Poller()
        self.poller.register(self.work_socket, zmq.POLLIN)
        self.poller.register(self.control_socket, zmq.POLLIN)

    # Set the polling with a
    # time-out of 0.5 seconds
    def poll(self):
        return dict(self.poller.poll(500))

    def test(self, sockets, sock_name):
        s = self.poll_sockets[sock_name]
        return sockets.get(s) == zmq.POLLIN

    def recv(self, sockets, sock_name):
        return self.poll_sockets[sock_name].recv_string()

    def send(self, sock_name, data):
        self.poll_sockets[sock_name].send_string(data)

    def close(self):
        self.work_socket.close()
        self.control_socket.close()
        self.join_socket.close()
| 29.301158
| 67
| 0.561866
| 854
| 7,589
| 4.809133
| 0.120609
| 0.050402
| 0.053811
| 0.048697
| 0.807889
| 0.769905
| 0.724617
| 0.704651
| 0.704651
| 0.702216
| 0
| 0.003275
| 0.315984
| 7,589
| 258
| 68
| 29.414729
| 0.787902
| 0.097641
| 0
| 0.670968
| 0
| 0
| 0.047507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167742
| false
| 0
| 0.019355
| 0.03871
| 0.290323
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7d8ad9440055aa98f273eb2823ced52b5fd25c12
| 145
|
py
|
Python
|
dpd/mapping/lane.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | 6
|
2020-08-13T22:21:25.000Z
|
2021-09-15T19:12:51.000Z
|
dpd/mapping/lane.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | 3
|
2018-01-25T09:11:01.000Z
|
2020-12-22T17:31:24.000Z
|
dpd/mapping/lane.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | null | null | null |
from .segment import Segment

class Lane(Segment):
    def __init__(self, link, segment_number):
        super().__init__(link, segment_number)
| 20.714286
| 46
| 0.717241
| 18
| 145
| 5.222222
| 0.611111
| 0.234043
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17931
| 145
| 6
| 47
| 24.166667
| 0.789916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
7df2adb16ab7f79b911cf025d3cdcb9ff5b242c1
| 41
|
py
|
Python
|
plugins/backups_plugin/__init__.py
|
StarryPy/StarryPy-Historic
|
b9dbd552b8c4631a5a8e9dda98b7ba447eca59da
|
[
"WTFPL"
] | 38
|
2015-02-12T11:57:59.000Z
|
2018-11-15T16:03:45.000Z
|
plugins/backups_plugin/__init__.py
|
StarryPy/StarryPy-Historic
|
b9dbd552b8c4631a5a8e9dda98b7ba447eca59da
|
[
"WTFPL"
] | 68
|
2015-02-05T23:29:47.000Z
|
2017-12-27T08:26:25.000Z
|
plugins/backups_plugin/__init__.py
|
StarryPy/StarryPy-Historic
|
b9dbd552b8c4631a5a8e9dda98b7ba447eca59da
|
[
"WTFPL"
] | 21
|
2015-02-06T18:58:21.000Z
|
2017-12-24T20:08:59.000Z
|
from backups_plugin import BackupsPlugin
| 20.5
| 40
| 0.902439
| 5
| 41
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
81759d319cf4b31c769cf2059f4c56bcd6dfdf05
| 195
|
py
|
Python
|
python/testData/formatter/trailingBlockCommentsIndentationPreserved.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/formatter/trailingBlockCommentsIndentationPreserved.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/formatter/trailingBlockCommentsIndentationPreserved.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class C:
    def method(self):
        def foo():
            def bar():
                pass
                # bar
            # bar
        # bar
    # method
# class
| 17.727273
| 25
| 0.282051
| 15
| 195
| 3.666667
| 0.533333
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.641026
| 195
| 10
| 26
| 19.5
| 0.785714
| 0.123077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.6
| false
| 0.2
| 0
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
c496dfb7277948961c1198296d2824e9b99f08a8
| 172
|
py
|
Python
|
configs/resnet/resnet50_8xb16_doc_quality.py
|
ndcuong91/mmclassification
|
4fcfbe6cdfbb91e2595367e6b3a21f86e363dd6b
|
[
"Apache-2.0"
] | null | null | null |
configs/resnet/resnet50_8xb16_doc_quality.py
|
ndcuong91/mmclassification
|
4fcfbe6cdfbb91e2595367e6b3a21f86e363dd6b
|
[
"Apache-2.0"
] | null | null | null |
configs/resnet/resnet50_8xb16_doc_quality.py
|
ndcuong91/mmclassification
|
4fcfbe6cdfbb91e2595367e6b3a21f86e363dd6b
|
[
"Apache-2.0"
] | null | null | null |
_base_ = [
    '../_base_/models/resnet50_doc_quality.py', '../_base_/datasets/doc_quality.py',
    '../_base_/schedules/schedule_200.py', '../_base_/default_runtime.py'
]
| 34.4
| 84
| 0.69186
| 21
| 172
| 4.952381
| 0.571429
| 0.173077
| 0.230769
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032051
| 0.093023
| 172
| 4
| 85
| 43
| 0.634615
| 0
| 0
| 0
| 0
| 0
| 0.790698
| 0.790698
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|