hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c5375a61f70cf626edb6616015a200154696be87
| 12
|
py
|
Python
|
hi.py
|
KINGDX/hellogit
|
0ad2ed501a715d75207a93bdaa738d551721a295
|
[
"Apache-2.0"
] | null | null | null |
hi.py
|
KINGDX/hellogit
|
0ad2ed501a715d75207a93bdaa738d551721a295
|
[
"Apache-2.0"
] | null | null | null |
hi.py
|
KINGDX/hellogit
|
0ad2ed501a715d75207a93bdaa738d551721a295
|
[
"Apache-2.0"
] | null | null | null |
print('110')
| 12
| 12
| 0.666667
| 2
| 12
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 12
| 1
| 12
| 12
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
c55d40fa21ef854bc754abff15b93730c3e3cafb
| 5,761
|
py
|
Python
|
demos/Multiscale/amm_mechanism/model/parts/uniswap.py
|
w-ghub/demos
|
6382676fae89bd5a190626612712fcedf17bca6d
|
[
"MIT"
] | 56
|
2020-07-08T23:23:15.000Z
|
2022-03-11T20:43:09.000Z
|
demos/Multiscale/amm_mechanism/model/parts/uniswap.py
|
w-ghub/demos
|
6382676fae89bd5a190626612712fcedf17bca6d
|
[
"MIT"
] | 41
|
2020-07-11T23:24:06.000Z
|
2022-01-28T13:28:07.000Z
|
demos/Multiscale/amm_mechanism/model/parts/uniswap.py
|
w-ghub/demos
|
6382676fae89bd5a190626612712fcedf17bca6d
|
[
"MIT"
] | 39
|
2020-07-15T11:35:04.000Z
|
2022-02-01T16:02:51.000Z
|
import numpy as np
import pandas as pd
from .utils import *
from .agent_utils import *
# Mechanisms
def mechanismHub_Ri(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate pool function to a given policy input:
- Ri_Purchase --> q_to_r_Ri
- Q_Purchase --> r_to_q_Ri
- AddLiquidity --> addLiquidity_Ri
- RemoveLiquidity --> removeLiquidity_Ri
"""
action = policy_input['action_id']
asset_id = policy_input['asset_id'] # defines asset subscript
if action == 'Ri_Purchase':
return q_to_r_Ri(params, substep, state_history, prev_state, policy_input)
elif action == 'Q_Purchase':
return r_to_q_Ri(params, substep, state_history, prev_state, policy_input)
elif action == 'AddLiquidity':
return addLiquidity_Ri(params, substep, state_history, prev_state, policy_input)
elif action == 'RemoveLiquidity':
return removeLiquidity_Ri(params, substep, state_history, prev_state, policy_input)
return('UNI_R' + asset_id, prev_state['UNI_R' + asset_id])
def mechanismHub_Q(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate pool function to a given policy input:
- Ri_Purchase --> q_to_r_Q
- Q_Purchase --> r_to_q_Q
- AddLiquidity --> addLiquidity_Q
- RemoveLiquidity --> removeLiquidity_Q
"""
action = policy_input['action_id']
asset_id = policy_input['asset_id'] # defines asset subscript
if action == 'Ri_Purchase':
return q_to_r_Q(params, substep, state_history, prev_state, policy_input)
elif action == 'Q_Purchase':
return r_to_q_Q(params, substep, state_history, prev_state, policy_input)
elif action == 'AddLiquidity':
return addLiquidity_Q(params, substep, state_history, prev_state, policy_input)
elif action == 'RemoveLiquidity':
return removeLiquidity_Q(params, substep, state_history, prev_state, policy_input)
return('UNI_Q'+ asset_id, prev_state['UNI_Q'+ asset_id])
def mechanismHub_Si(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate pool function to a given policy input:
- AddLiquidity --> addLiquidity_Si
- RemoveLiquidity --> removeLiquidity_Si
"""
action = policy_input['action_id']
asset_id = policy_input['asset_id'] # defines asset subscript
if action == 'AddLiquidity':
return addLiquidity_Si(params, substep, state_history, prev_state, policy_input)
elif action == 'RemoveLiquidity':
return removeLiquidity_Si(params, substep, state_history, prev_state, policy_input)
return('UNI_S'+ asset_id, prev_state['UNI_S'+ asset_id])
def agenthub(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate agent function to a given policy input:
- Ri_Purchase --> agent_q_to_r_trade
- Q_Purchase --> agent_r_to_q_trade
- AddLiquidity --> agent_add_liq
- RemoveLiquidity --> agent_remove_liq
- R_Swap --> agent_r_to_r_swap
"""
action = policy_input['action_id']
if action == 'Ri_Purchase':
return agent_q_to_r_trade(params, substep, state_history, prev_state, policy_input)
elif action == 'Q_Purchase':
return agent_r_to_q_trade(params, substep, state_history, prev_state, policy_input)
elif action == 'AddLiquidity':
return agent_add_liq(params, substep, state_history, prev_state, policy_input)
elif action == 'RemoveLiquidity':
return agent_remove_liq(params, substep, state_history, prev_state, policy_input)
elif action == 'R_Swap':
return agent_r_to_r_swap(params, substep, state_history, prev_state, policy_input)
return('uni_agents', prev_state['uni_agents'])
def mechanismHub_ij(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate pool function to a given policy input depending on the 'direction':
- R_Swap --> agent_r_to_r_in
- R_Swap --> agent_r_to_r_out
"""
action = policy_input['action_id']
if action == 'R_Swap':
asset_id = policy_input['asset_id']
purchased_asset_id = policy_input['purchased_asset_id']
# direction = policy_input['direction']
direction = asset_id + purchased_asset_id
in_direction = 'ij'
out_direction = in_direction[::-1]
if direction == in_direction:
return r_to_r_in(params, substep, state_history, prev_state, policy_input)
elif direction == out_direction:
return r_to_r_out(params, substep, state_history, prev_state, policy_input)
# return('UNI_' + asset_id + purchased_asset_id, prev_state['UNI_' + asset_id + purchased_asset_id])
return('UNI_ij', prev_state['UNI_ij'])
def mechanismHub_ji(params, substep, state_history, prev_state, policy_input):
"""
This function returns the approprate pool function to a given policy input depending on the 'direction':
- R_Swap --> agent_r_to_r_in
- R_Swap --> agent_r_to_r_out
"""
action = policy_input['action_id']
if action == 'R_Swap':
asset_id = policy_input['asset_id']
purchased_asset_id = policy_input['purchased_asset_id']
# direction = policy_input['direction']
direction = asset_id + purchased_asset_id
in_direction = 'ji'
out_direction = in_direction[::-1]
if direction == in_direction:
return r_to_r_in(params, substep, state_history, prev_state, policy_input)
elif direction == out_direction:
return r_to_r_out(params, substep, state_history, prev_state, policy_input)
# return('UNI_' + asset_id + purchased_asset_id, prev_state['UNI_' + asset_id + purchased_asset_id])
return('UNI_ji', prev_state['UNI_ji'])
| 43.315789
| 104
| 0.716716
| 777
| 5,761
| 4.93565
| 0.078507
| 0.131943
| 0.11734
| 0.162973
| 0.854759
| 0.812516
| 0.808866
| 0.800261
| 0.790613
| 0.787484
| 0
| 0.000426
| 0.184343
| 5,761
| 133
| 105
| 43.315789
| 0.815705
| 0.265926
| 0
| 0.533333
| 0
| 0
| 0.09484
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.053333
| 0
| 0.386667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3db4c4a5a3d283966ab719935cc53b70d7bc8be9
| 69
|
py
|
Python
|
principal.py
|
xLooKsx/ES-17-2
|
797878bc75bf81f9472e7218fc270d2478b5f3e5
|
[
"Apache-2.0"
] | null | null | null |
principal.py
|
xLooKsx/ES-17-2
|
797878bc75bf81f9472e7218fc270d2478b5f3e5
|
[
"Apache-2.0"
] | null | null | null |
principal.py
|
xLooKsx/ES-17-2
|
797878bc75bf81f9472e7218fc270d2478b5f3e5
|
[
"Apache-2.0"
] | null | null | null |
def somar(x, y):
return x+y
def subtrair(x, y):
return x-y
| 9.857143
| 19
| 0.565217
| 14
| 69
| 2.785714
| 0.428571
| 0.205128
| 0.410256
| 0.461538
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289855
| 69
| 6
| 20
| 11.5
| 0.795918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
3dc31a1e758e081e99bdf8ee2f9823c260b76d75
| 4,337
|
py
|
Python
|
tests/test_sync_file_lists.py
|
serge-m/ml-dataset-tools
|
2dc07749e343b069283ec798355fac138312f8c0
|
[
"MIT"
] | null | null | null |
tests/test_sync_file_lists.py
|
serge-m/ml-dataset-tools
|
2dc07749e343b069283ec798355fac138312f8c0
|
[
"MIT"
] | null | null | null |
tests/test_sync_file_lists.py
|
serge-m/ml-dataset-tools
|
2dc07749e343b069283ec798355fac138312f8c0
|
[
"MIT"
] | null | null | null |
import ml_dataset_tools as mdt
import pytest
def test_simple():
sl = mdt.SyncFileLists({'images': ['i1', 'i2'], 'classes': ['c1', 'c2']})
assert list(sl['images']) == ['i1', 'i2']
assert list(sl['classes']) == ['c1', 'c2']
with pytest.raises(KeyError):
sl['non-existent']
def test_with_selector():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
assert list(sl['images']) == ['i1', 'i2', 'i3']
assert list(sl['images', train]) == ['i1', 'i3']
assert list(sl['images', test]) == ['i2']
assert list(sl['images', valid]) == []
def test_eq():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
assert sl == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
assert sl != mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3']},
selector=[train, test, train]
)
assert sl != mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[test, test, train]
)
assert sl != mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
)
def test_transform():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
result = sl.with_transformed('images', lambda x: x.replace('i', 'L'), 'labels')
assert result == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3'], 'labels': ['L1', 'L2', 'L3']},
selector=[train, test, train]
)
assert sl == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
def test_with_val():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
result = sl.with_val('images', lambda x: x[-1] in ['2', '3'])
assert result == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, valid, valid]
)
assert sl == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
def test_filter_one_column():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
result = sl.filter(['images'], lambda x, *_: x[-1] in ['2', '3'])
assert result == mdt.SyncFileLists(
dict_lists={'images': ['i2', 'i3'], 'classes': ['c2', 'c3']},
selector=[test, train]
)
assert sl == mdt.SyncFileLists(
dict_lists={'images': ['i1', 'i2', 'i3'], 'classes': ['c1', 'c2', 'c3']},
selector=[train, test, train]
)
def test_filter_two_columns():
train = mdt.SyncFileLists.Type.train
valid = mdt.SyncFileLists.Type.valid
test = mdt.SyncFileLists.Type.test
sl = mdt.SyncFileLists(
dict_lists={'images': ['X', 'Y', 'Y'], 'classes': ['good1', 'good2', 'bad3']},
selector=[train, test, train]
)
result = sl.filter(['images', 'classes'], lambda i, c: i == 'Y' and c.startswith('good'))
assert result == mdt.SyncFileLists(
dict_lists={'images': ['Y'], 'classes': ['good2']},
selector=[test]
)
| 31.656934
| 111
| 0.554761
| 514
| 4,337
| 4.61284
| 0.124514
| 0.242935
| 0.151835
| 0.179249
| 0.81822
| 0.80135
| 0.785323
| 0.767187
| 0.746942
| 0.746942
| 0
| 0.033234
| 0.222965
| 4,337
| 136
| 112
| 31.889706
| 0.670326
| 0
| 0
| 0.551402
| 0
| 0
| 0.128459
| 0
| 0
| 0
| 0
| 0
| 0.158879
| 1
| 0.065421
| false
| 0
| 0.018692
| 0
| 0.084112
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b10de32c98f5fe200b5b5e7cc4f4e438cd77aec9
| 7,883
|
py
|
Python
|
test/test_util.py
|
sanjaymsh/javaproperties
|
7ce3abfbbc4eadaa82f98e17542b6305679f87f3
|
[
"MIT"
] | null | null | null |
test/test_util.py
|
sanjaymsh/javaproperties
|
7ce3abfbbc4eadaa82f98e17542b6305679f87f3
|
[
"MIT"
] | null | null | null |
test/test_util.py
|
sanjaymsh/javaproperties
|
7ce3abfbbc4eadaa82f98e17542b6305679f87f3
|
[
"MIT"
] | null | null | null |
import pytest
from javaproperties.util import LinkedList, ascii_splitlines
def test_linkedlist_empty():
ll = LinkedList()
assert list(ll) == []
assert list(ll.iternodes()) == []
assert ll.start is None
assert ll.end is None
def test_linkedlist_one_elem():
ll = LinkedList()
n = ll.append(42)
assert list(ll) == [42]
assert list(ll.iternodes()) == [n]
assert ll.find_node(n) == 0
assert ll.start is n
assert ll.end is n
assert n.prev is None
assert n.next is None
def test_linkedlist_two_elem():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
assert list(ll) == [42, 'fnord']
assert list(ll.iternodes()) == [n1, n2]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.start is n1
assert ll.end is n2
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is None
def test_linked_list_three_elem():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
assert list(ll) == [42, 'fnord', [0, 1, 2]]
assert list(ll.iternodes()) == [n1, n2, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.find_node(n3) == 2
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is n3
assert n3.prev is n2
assert n3.next is None
def test_linked_list_unlink_only():
ll = LinkedList()
n = ll.append(42)
n.unlink()
assert list(ll) == []
assert list(ll.iternodes()) == []
assert ll.start is None
assert ll.end is None
assert ll.find_node(n) is None
def test_linked_list_unlink_first():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
n1.unlink()
assert list(ll) == ['fnord', [0, 1, 2]]
assert list(ll.iternodes()) == [n2, n3]
assert ll.find_node(n1) is None
assert ll.find_node(n2) == 0
assert ll.find_node(n3) == 1
assert ll.start is n2
assert ll.end is n3
assert n2.prev is None
assert n2.next is n3
assert n3.prev is n2
assert n3.next is None
def test_linked_list_unlink_middle():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
n2.unlink()
assert list(ll) == [42, [0, 1, 2]]
assert list(ll.iternodes()) == [n1, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) is None
assert ll.find_node(n3) == 1
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is n3
assert n3.prev is n1
assert n3.next is None
def test_linked_list_unlink_last():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
n3.unlink()
assert list(ll) == [42, 'fnord']
assert list(ll.iternodes()) == [n1, n2]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.find_node(n3) is None
assert ll.start is n1
assert ll.end is n2
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is None
def test_linked_list_insert_before_first():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n1.insert_before(3.14)
assert list(ll) == [3.14, 42, 'fnord', [0, 1, 2]]
assert list(ll.iternodes()) == [nx, n1, n2, n3]
assert ll.find_node(n1) == 1
assert ll.find_node(n2) == 2
assert ll.find_node(n3) == 3
assert ll.find_node(nx) == 0
assert ll.start is nx
assert ll.end is n3
assert nx.prev is None
assert nx.next is n1
assert n1.prev is nx
assert n1.next is n2
assert n2.prev is n1
assert n2.next is n3
assert n3.prev is n2
assert n3.next is None
def test_linked_list_insert_before_middle():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n2.insert_before(3.14)
assert list(ll) == [42, 3.14, 'fnord', [0, 1, 2]]
assert list(ll.iternodes()) == [n1, nx, n2, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 2
assert ll.find_node(n3) == 3
assert ll.find_node(nx) == 1
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is nx
assert nx.prev is n1
assert nx.next is n2
assert n2.prev is nx
assert n2.next is n3
assert n3.prev is n2
assert n3.next is None
def test_linked_list_insert_before_last():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n3.insert_before(3.14)
assert list(ll) == [42, 'fnord', 3.14, [0, 1, 2]]
assert list(ll.iternodes()) == [n1, n2, nx, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.find_node(n3) == 3
assert ll.find_node(nx) == 2
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is nx
assert nx.prev is n2
assert nx.next is n3
assert n3.prev is nx
assert n3.next is None
def test_linked_list_insert_after_first():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n1.insert_after(3.14)
assert list(ll) == [42, 3.14, 'fnord', [0, 1, 2]]
assert list(ll.iternodes()) == [n1, nx, n2, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 2
assert ll.find_node(n3) == 3
assert ll.find_node(nx) == 1
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is nx
assert nx.prev is n1
assert nx.next is n2
assert n2.prev is nx
assert n2.next is n3
assert n3.prev is n2
assert n3.next is None
def test_linked_list_insert_after_middle():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n2.insert_after(3.14)
assert list(ll) == [42, 'fnord', 3.14, [0, 1, 2]]
assert list(ll.iternodes()) == [n1, n2, nx, n3]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.find_node(n3) == 3
assert ll.find_node(nx) == 2
assert ll.start is n1
assert ll.end is n3
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is nx
assert nx.prev is n2
assert nx.next is n3
assert n3.prev is nx
assert n3.next is None
def test_linked_list_insert_after_last():
ll = LinkedList()
n1 = ll.append(42)
n2 = ll.append('fnord')
n3 = ll.append([0, 1, 2])
nx = n3.insert_after(3.14)
assert list(ll) == [42, 'fnord', [0, 1, 2], 3.14]
assert list(ll.iternodes()) == [n1, n2, n3, nx]
assert ll.find_node(n1) == 0
assert ll.find_node(n2) == 1
assert ll.find_node(n3) == 2
assert ll.find_node(nx) == 3
assert ll.start is n1
assert ll.end is nx
assert n1.prev is None
assert n1.next is n2
assert n2.prev is n1
assert n2.next is n3
assert n3.prev is n2
assert n3.next is nx
assert nx.prev is n3
assert nx.next is None
@pytest.mark.parametrize('s,lines', [
('', []),
('foobar', ['foobar']),
('foo\n', ['foo\n']),
('foo\r', ['foo\r']),
('foo\r\n', ['foo\r\n']),
('foo\n\r', ['foo\n', '\r']),
('foo\nbar', ['foo\n', 'bar']),
('foo\rbar', ['foo\r', 'bar']),
('foo\r\nbar', ['foo\r\n', 'bar']),
('foo\n\rbar', ['foo\n', '\r', 'bar']),
(
'Why\vare\fthere\x1Cso\x1Ddang\x1Emany\x85line\u2028separator\u2029'
'characters?',
['Why\vare\fthere\x1Cso\x1Ddang\x1Emany\x85line\u2028separator\u2029'
'characters?'],
),
])
def test_ascii_splitlines(s, lines):
assert ascii_splitlines(s) == lines
| 28.356115
| 77
| 0.602055
| 1,343
| 7,883
| 3.453462
| 0.051378
| 0.117292
| 0.103493
| 0.137991
| 0.889608
| 0.860069
| 0.831608
| 0.803364
| 0.779431
| 0.749677
| 0
| 0.071647
| 0.254599
| 7,883
| 277
| 78
| 28.458484
| 0.717665
| 0
| 0
| 0.683206
| 0
| 0
| 0.050488
| 0.016745
| 0
| 0
| 0
| 0
| 0.645038
| 1
| 0.057252
| false
| 0
| 0.007634
| 0
| 0.064886
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b12240da5c5f86c9bb40816505e97798f84fcdd0
| 12
|
py
|
Python
|
_draft/x_9_9.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
_draft/x_9_9.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | 1
|
2021-11-13T08:03:04.000Z
|
2021-11-13T08:03:04.000Z
|
_draft/x_9_9.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
# x_9_9
#
#
| 3
| 7
| 0.416667
| 3
| 12
| 1
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.333333
| 12
| 3
| 8
| 4
| 0.125
| 0.416667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1347d7d53394123fff0bf6cbafc6449ba856f29
| 7,514
|
py
|
Python
|
scripts/part2/DetailPlacement.py
|
GeorgePap-719/Python_VLSI
|
d3d6e71e0a4e54571a8fa4b9ecbc2ffd48444f66
|
[
"MIT"
] | 1
|
2021-06-14T17:35:44.000Z
|
2021-06-14T17:35:44.000Z
|
scripts/part2/DetailPlacement.py
|
GeorgePap-719/Python_VLSI
|
d3d6e71e0a4e54571a8fa4b9ecbc2ffd48444f66
|
[
"MIT"
] | 1
|
2021-06-19T08:59:15.000Z
|
2021-06-19T08:59:15.000Z
|
scripts/part2/DetailPlacement.py
|
GeorgePap-719/Python_VLSI
|
d3d6e71e0a4e54571a8fa4b9ecbc2ffd48444f66
|
[
"MIT"
] | 1
|
2021-06-14T17:35:46.000Z
|
2021-06-14T17:35:46.000Z
|
import copy
from natsort import *
from scripts.classes.Net import total_calculate_net_wirelength
from scripts.classes.Node import Node
from scripts.part1.Legalization import legalizing_tetris_like_algo
# noinspection DuplicatedCode
def first_detailed_placement(node_list: list, row_list: list, net_list: list):
# update_net(net_list)
# print(total_calculate_net_wirelength(net_list))
# total_calculate_net_wirelength is already 7680 before it gets in here.
updated_net_list: list = copy.deepcopy(net_list)
changes_flag = True
wirelength = total_calculate_net_wirelength(net_list)
# we stop only when there are no more changes that lead to better result
while changes_flag:
changes_flag = False
""" Sort the given iterable in the way that humans expect."""
sorted_list: list[Node] = natsorted(node_list, key=lambda x: natsort_key(x.node_name))
for index, node in enumerate(sorted_list):
if index + 1 == len(sorted_list):
break
for index2, node2 in enumerate(sorted_list):
if index2 != index:
# All possible pairs, excluding duplicates
if node.node_width == node2.node_width:
swap_positions(sorted_list, index, index2)
update_net(updated_net_list)
new_wirelength = total_calculate_net_wirelength(updated_net_list)
if new_wirelength < wirelength:
print("found better wirelength")
wirelength = new_wirelength
changes_flag = True
break # make the first advantageous exchange
else:
# swap back positions
swap_positions(sorted_list, index2, index)
update_net(updated_net_list)
return node_list, row_list, updated_net_list
# noinspection DuplicatedCode
def second_detailed_placement(node_list: list, row_list: list, net_list: list):
updated_net_list: list = copy.deepcopy(net_list)
changes_flag = True
wirelength = total_calculate_net_wirelength(net_list)
# we stop only when there are no more changes that lead to better result
while changes_flag:
changes_flag = False
for index, node in enumerate(node_list):
if index + 1 == len(node_list):
break
for index2, node2 in enumerate(node_list):
if index2 != index:
# All possible pairs, excluding duplicates
if node.node_width == node2.node_width:
swap_positions(node_list, index, index2)
update_net(updated_net_list)
new_wirelength = total_calculate_net_wirelength(updated_net_list)
if new_wirelength > wirelength:
print("found better wirelength")
wirelength = new_wirelength
changes_flag = True
# Notice how there is no break here, we make the most optimal change
else:
# swap back positions
swap_positions(node_list, index2, index)
update_net(updated_net_list)
return node_list, row_list, updated_net_list
# noinspection DuplicatedCode
def third_detailed_placement(node_list: list, row_list: list, net_list: list):
updated_net_list: list = copy.deepcopy(net_list)
changes_flag = True
wirelength = total_calculate_net_wirelength(net_list)
# we stop only when there are no more changes that lead to better result
while changes_flag:
changes_flag = False
""" Sort the given iterable in the way that humans expect."""
sorted_list: list[Node] = natsorted(node_list, key=lambda x: natsort_key(x.node_name))
for index, node in enumerate(sorted_list):
if index + 1 == len(sorted_list):
break
for index2, node2 in enumerate(sorted_list):
if index2 != index:
# All possible pairs, excluding duplicates
if node.node_width == node2.node_width:
swap_positions(sorted_list, index, index2)
update_net(updated_net_list)
new_wirelength = total_calculate_net_wirelength(updated_net_list)
if new_wirelength < wirelength:
print("found better wirelength")
wirelength = new_wirelength
changes_flag = True
break # make the first advantageous exchange
else:
# swap back positions
swap_positions(sorted_list, index2, index)
update_net(updated_net_list)
legalizing_tetris_like_algo(sorted_list, row_list, updated_net_list)
return node_list, row_list, updated_net_list
# noinspection DuplicatedCode
def fourth_detailed_placement(node_list: list, row_list: list, net_list: list):
updated_net_list: list = copy.deepcopy(net_list)
changes_flag = True
wirelength = total_calculate_net_wirelength(net_list)
# we stop only when there are no more changes that lead to better result
while changes_flag:
changes_flag = False
for index, node in enumerate(node_list):
if index + 1 == len(node_list):
break
for index2, node2 in enumerate(node_list):
if index2 != index:
# All possible pairs, excluding duplicates
if node.node_width == node2.node_width:
swap_positions(node_list, index, index2)
update_net(updated_net_list)
new_wirelength = total_calculate_net_wirelength(updated_net_list)
if new_wirelength < wirelength:
print("found better wirelength")
wirelength = new_wirelength
changes_flag = True
# Notice how there is no break here, we make the most optimal change
else:
# swap back positions
swap_positions(node_list, index2, index)
update_net(updated_net_list)
legalizing_tetris_like_algo(node_list, row_list, updated_net_list)
return node_list, row_list, updated_net_list
def swap_positions(data_list: list, position1: int, position2: int):
    """Exchange the (x, y) coordinates of the two nodes at the given indices.

    Only the coordinates are exchanged; widths are left untouched (callers
    only swap nodes of equal width, see the commented-out width lines in the
    original), and the list entries themselves are not reordered.

    BUGFIX: the original code did ``temp_node = data_list[position1]``, which
    binds *the same object* as node1 rather than copying its values.  Writing
    node2's coordinates into node1 therefore also changed ``temp_node``, so
    node2 ended up being assigned its own coordinates back: node1 moved onto
    node2's position but node2 never moved.  Saving the raw coordinate values
    in plain locals fixes the swap.
    """
    node1 = data_list[position1]
    node2 = data_list[position2]
    # Keep plain copies of node1's coordinates before they are overwritten.
    saved_x = node1.node_x
    saved_y = node1.node_y
    # swap
    node1.node_x = node2.node_x
    node1.node_y = node2.node_y
    node2.node_x = saved_x
    node2.node_y = saved_y
def update_net(net_list: list):
    """Refresh every net: recompute its bounding coordinates, then its cached
    wirelength, in that order."""
    for current_net in net_list:
        current_net.find_coordinates_of_net()
        current_net.calculate_net_wirelength()
| 43.183908
| 96
| 0.601544
| 857
| 7,514
| 4.985998
| 0.131855
| 0.062251
| 0.072081
| 0.069506
| 0.840627
| 0.814182
| 0.806225
| 0.805289
| 0.805289
| 0.805289
| 0
| 0.010524
| 0.342427
| 7,514
| 173
| 97
| 43.433526
| 0.854281
| 0.15358
| 0
| 0.777778
| 0
| 0
| 0.01482
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051282
| false
| 0
| 0.042735
| 0
| 0.128205
| 0.034188
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1402a1c9953fa83db4a881c9bd55608b495cbd0
| 27,954
|
py
|
Python
|
tests/unit/benchmark/contexts/test_standalone.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | 1
|
2019-12-08T21:57:31.000Z
|
2019-12-08T21:57:31.000Z
|
tests/unit/benchmark/contexts/test_standalone.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/benchmark/contexts/test_standalone.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2016-2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Unittest for yardstick.benchmark.contexts.standalone
from __future__ import absolute_import
import os
import unittest
import mock
from yardstick.benchmark.contexts import standalone
from yardstick.benchmark.contexts.standalone import ovsdpdk, sriov
# Module paths to stub out via mock.patch.dict("sys.modules", MOCKS) in the
# get_context_impl tests, so the lookup runs without importing real NFVi
# driver modules.
MOCKS = {
    'yardstick.benchmark.contexts': mock.MagicMock(),
    'yardstick.benchmark.contexts.standalone.sriov': mock.MagicMock(),
    'yardstick.benchmark.contexts.standalone.ovsdpdk': mock.MagicMock(),
    'yardstick.benchmark.contexts.standalone': mock.MagicMock(),
}
# Patch out time in all three modules so retry/sleep loops run instantly.
# The three mocks are injected into every test method, outermost last:
# (mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time).
@mock.patch('yardstick.benchmark.contexts.standalone.ovsdpdk.time')
@mock.patch('yardstick.benchmark.contexts.standalone.time')
@mock.patch('yardstick.benchmark.contexts.standalone.sriov.time')
class StandaloneContextTestCase(unittest.TestCase):
    """Unit tests for StandaloneContext covering both SRIOV and OVS-DPDK
    node roles: init, _get_server lookup, deploy/undeploy and NFVi object
    resolution."""

    # Fixture file names, resolved relative to this test directory by
    # _get_file_abspath().
    NODES_SAMPLE = "nodes_sample_new.yaml"
    NODES_SAMPLE_SRIOV = "nodes_sample_new_sriov.yaml"
    NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample_new.yaml"
    NODES_SAMPLE_OVSDPDK = "nodes_sample_ovs.yaml"
    NODES_SAMPLE_OVSDPDK_ROLE = "nodes_sample_ovsdpdk.yaml"
    NODES_DUPLICATE_OVSDPDK = "nodes_duplicate_sample_ovs.yaml"

    def setUp(self):
        """Create a fresh StandaloneContext for every test."""
        self.test_context = standalone.StandaloneContext()

    def test_construct(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """A new context starts with empty name/file_path/nodes/nfvi_node."""
        self.assertIsNone(self.test_context.name)
        self.assertIsNone(self.test_context.file_path)
        self.assertEqual(self.test_context.nodes, [])
        self.assertEqual(self.test_context.nfvi_node, [])

    def test_unsuccessful_init(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """init() raises IOError when the nodes file does not exist."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath("error_file")
        }
        self.assertRaises(IOError, self.test_context.init, attrs)

    def test_successful_init_sriov(self, mock_sriov_time, mock_standlalone_time,
                                   mock_ovsdpdk_time):
        """init() on the SRIOV sample loads 2 nodes and 2 nfvi nodes."""
        attrs_sriov = {
            'name': 'sriov',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock()
        self.test_context.init(attrs_sriov)
        self.assertEqual(self.test_context.name, "sriov")
        self.assertEqual(len(self.test_context.nodes), 2)
        self.assertEqual(len(self.test_context.nfvi_node), 2)
        self.assertEqual(self.test_context.nfvi_node[0]["name"], "sriov")

    def test_successful_init_ovs(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """init() on the OVS-DPDK sample loads 2 nodes and 2 nfvi nodes."""
        attrs_ovs = {
            'name': 'ovs',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock()
        self.test_context.init(attrs_ovs)
        self.assertEqual(self.test_context.name, "ovs")
        self.assertEqual(len(self.test_context.nodes), 2)
        self.assertEqual(len(self.test_context.nfvi_node), 2)
        self.assertEqual(self.test_context.nfvi_node[0]["name"], "ovs")

    def test__get_server_with_dic_attr_name_sriov(self, mock_sriov_time, mock_standlalone_time,
                                                  mock_ovsdpdk_time):
        """_get_server() returns None for a dict attr_name that matches no node (SRIOV)."""
        attrs_sriov = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.init(attrs_sriov)
        attr_name = {'name': 'foo.bar'}
        result = self.test_context._get_server(attr_name)
        self.assertEqual(result, None)

    def test__get_server_with_dic_attr_name_ovs(self, mock_sriov_time, mock_standlalone_time,
                                                mock_ovsdpdk_time):
        """_get_server() returns None for a dict attr_name that matches no node (OVS)."""
        attrs_ovs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.init(attrs_ovs)
        attr_name = {'name': 'foo.bar'}
        result = self.test_context._get_server(attr_name)
        self.assertEqual(result, None)

    def test__get_server_not_found_sriov(self, mock_sriov_time, mock_standlalone_time,
                                         mock_ovsdpdk_time):
        """_get_server() returns None for a string name outside this context (SRIOV)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.init(attrs)
        attr_name = 'bar.foo'
        result = self.test_context._get_server(attr_name)
        self.assertEqual(result, None)

    def test__get_server_not_found_ovs(self, mock_sriov_time, mock_standlalone_time,
                                       mock_ovsdpdk_time):
        """_get_server() returns None for a string name outside this context (OVS)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.init(attrs)
        attr_name = 'bar.foo'
        result = self.test_context._get_server(attr_name)
        self.assertEqual(result, None)

    def test__get_server_duplicate_sriov(self, mock_sriov_time, mock_standlalone_time,
                                         mock_ovsdpdk_time):
        """Duplicate node names in the SRIOV sample make _get_server() raise ValueError."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock(return_value="sriov")
        self.test_context.init(attrs)
        attr_name = 'sriov.foo'
        # self.test_context.name = "sriov"
        self.assertRaises(ValueError, self.test_context._get_server, attr_name)

    def test__get_server_duplicate_ovs(self, mock_sriov_time, mock_standlalone_time,
                                       mock_ovsdpdk_time):
        """Duplicate node names in the OVS sample make _get_server() raise ValueError."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_DUPLICATE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock(return_value="OvsDpdk")
        self.test_context.init(attrs)
        attr_name = 'ovs.foo'
        self.assertRaises(
            ValueError,
            self.test_context._get_server,
            attr_name)

    def test__get_server_found_sriov(self, mock_sriov_time, mock_standlalone_time,
                                     mock_ovsdpdk_time):
        """_get_server() resolves 'sriov.foo' to the node from the SRIOV sample file."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_SRIOV)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock(return_value="OvsDpdk")
        self.test_context.init(attrs)
        attr_name = 'sriov.foo'
        result = self.test_context._get_server(attr_name)
        # expected values come from the nodes_sample_new_sriov.yaml fixture
        self.assertEqual(result['ip'], '10.123.123.122')
        self.assertEqual(result['name'], 'sriov.foo')
        self.assertEqual(result['user'], 'root')

    def test__get_server_found_ovs(self, mock_sriov_time, mock_standlalone_time,
                                   mock_ovsdpdk_time):
        """_get_server() resolves 'ovs.foo' to the node from the OVS-DPDK role sample."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK_ROLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.Mock(return_value="OvsDpdk")
        self.test_context.init(attrs)
        attr_name = 'ovs.foo'
        result = self.test_context._get_server(attr_name)
        # expected values come from the nodes_sample_ovsdpdk.yaml fixture
        self.assertEqual(result['ip'], '10.223.197.222')
        self.assertEqual(result['name'], 'ovs.foo')
        self.assertEqual(result['user'], 'root')

    def test__deploy_unsuccessful(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """NOTE(review): this test only sets vm_deploy and asserts nothing —
        it cannot fail; presumably deploy() should be called and checked."""
        self.test_context.vm_deploy = False

    def test__deploy_sriov_firsttime(self, mock_sriov_time, mock_standlalone_time,
                                     mock_ovsdpdk_time):
        """deploy() succeeds on the SRIOV first-run path (first_run=True)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        MYSRIOV = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.vm_deploy = True
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.nfvi_obj.sriov = MYSRIOV
        self.test_context.nfvi_obj.ssh_remote_machine = mock.Mock()
        self.test_context.nfvi_obj.first_run = True
        self.test_context.nfvi_obj.install_req_libs()
        self.test_context.nfvi_obj.get_nic_details = mock.Mock()
        PORTS = ['0000:06:00.0', '0000:06:00.1']
        NIC_DETAILS = {
            'interface': {0: 'enp6s0f0', 1: 'enp6s0f1'},
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'pci': ['0000:06:00.0', '0000:06:00.1'],
            'phy_driver': 'i40e'}
        DRIVER = 'i40e'
        result = self.test_context.nfvi_obj.setup_sriov_context(
            PORTS,
            NIC_DETAILS,
            DRIVER)
        print("{0}".format(result))
        self.assertIsNone(self.test_context.deploy())

    def test__deploy_sriov_notfirsttime(self, mock_sriov_time, mock_standlalone_time,
                                        mock_ovsdpdk_time):
        """deploy() succeeds on the SRIOV repeat-run path (first_run=False)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        MYSRIOV = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.vm_deploy = True
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.nfvi_obj.sriov = MYSRIOV
        self.test_context.nfvi_obj.ssh_remote_machine = mock.Mock()
        self.test_context.nfvi_obj.first_run = False
        self.test_context.nfvi_obj.get_nic_details = mock.Mock()
        PORTS = ['0000:06:00.0', '0000:06:00.1']
        NIC_DETAILS = {
            'interface': {0: 'enp6s0f0', 1: 'enp6s0f1'},
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'pci': ['0000:06:00.0', '0000:06:00.1'],
            'phy_driver': 'i40e'}
        DRIVER = 'i40e'
        result = self.test_context.nfvi_obj.setup_sriov_context(
            PORTS,
            NIC_DETAILS,
            DRIVER)
        print("{0}".format(result))
        self.assertIsNone(self.test_context.deploy())

    def test__deploy_ovs_firsttime(self, mock_sriov_time, mock_standlalone_time,
                                   mock_ovsdpdk_time):
        """deploy() succeeds on the OVS-DPDK first-run path (first_run=True)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        MYOVS = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            # NOTE(review): the last three entries lack separating commas, so
            # Python's implicit string concatenation fuses them into ONE flow
            # string — likely unintended; confirm against the real fixture.
            'flow': ['ovs-ofctl add-flow br0 in_port=1,action=output:3',
                     'ovs-ofctl add-flow br0 in_port=3,action=output:1'
                     'ovs-ofctl add-flow br0 in_port=4,action=output:2'
                     'ovs-ofctl add-flow br0 in_port=2,action=output:4'],
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.vm_deploy = True
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.ovs = MYOVS
        self.test_context.nfvi_obj.ssh_remote_machine = mock.Mock()
        self.test_context.nfvi_obj.first_run = True
        self.test_context.nfvi_obj.install_req_libs()
        self.test_context.nfvi_obj.get_nic_details = mock.Mock()
        PORTS = ['0000:06:00.0', '0000:06:00.1']
        NIC_DETAILS = {
            'interface': {0: 'enp6s0f0', 1: 'enp6s0f1'},
            'vports_mac': ['00:00:00:00:00:05', '00:00:00:00:00:06'],
            'pci': ['0000:06:00.0', '0000:06:00.1'],
            'phy_driver': 'i40e'}
        DRIVER = 'i40e'
        self.test_context.nfvi_obj.setup_ovs = mock.Mock()
        self.test_context.nfvi_obj.start_ovs_serverswitch = mock.Mock()
        self.test_context.nfvi_obj.setup_ovs_bridge = mock.Mock()
        self.test_context.nfvi_obj.add_oflows = mock.Mock()
        result = self.test_context.nfvi_obj.setup_ovs_context(
            PORTS,
            NIC_DETAILS,
            DRIVER)
        print("{0}".format(result))
        self.assertIsNone(self.test_context.deploy())

    def test__deploy_ovs_notfirsttime(self, mock_sriov_time, mock_standlalone_time,
                                      mock_ovsdpdk_time):
        """deploy() succeeds on the OVS-DPDK repeat-run path (first_run=False)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        MYOVS = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            # NOTE(review): same missing-comma string concatenation as in
            # test__deploy_ovs_firsttime — confirm intent.
            'flow': ['ovs-ofctl add-flow br0 in_port=1,action=output:3',
                     'ovs-ofctl add-flow br0 in_port=3,action=output:1'
                     'ovs-ofctl add-flow br0 in_port=4,action=output:2'
                     'ovs-ofctl add-flow br0 in_port=2,action=output:4'],
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.vm_deploy = True
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.ovs = MYOVS
        self.test_context.nfvi_obj.ssh_remote_machine = mock.Mock()
        self.test_context.nfvi_obj.first_run = False
        self.test_context.nfvi_obj.get_nic_details = mock.Mock()
        PORTS = ['0000:06:00.0', '0000:06:00.1']
        NIC_DETAILS = {
            'interface': {0: 'enp6s0f0', 1: 'enp6s0f1'},
            'vports_mac': ['00:00:00:00:00:05', '00:00:00:00:00:06'],
            'pci': ['0000:06:00.0', '0000:06:00.1'],
            'phy_driver': 'i40e'}
        DRIVER = 'i40e'
        self.test_context.nfvi_obj.setup_ovs(PORTS)
        self.test_context.nfvi_obj.start_ovs_serverswitch()
        self.test_context.nfvi_obj.setup_ovs_bridge()
        self.test_context.nfvi_obj.add_oflows()
        result = self.test_context.nfvi_obj.setup_ovs_context(
            PORTS,
            NIC_DETAILS,
            DRIVER)
        print("{0}".format(result))
        self.assertIsNone(self.test_context.deploy())

    def test_undeploy_sriov(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """undeploy() returns None with destroy_vm mocked out (SRIOV)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE)
        }
        self.test_context.nfvi_node = [{
            'name': 'sriov',
            'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
            'ip': '10.223.197.140',
            'role': 'Sriov',
            'user': 'root',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'intel123',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.nfvi_obj.destroy_vm = mock.Mock()
        self.assertIsNone(self.test_context.undeploy())

    def test_undeploy_ovs(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """undeploy() returns None with destroy_vm mocked out (OVS)."""
        attrs = {
            'name': 'foo',
            'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
        }
        self.test_context.nfvi_node = [{
            'name': 'ovs',
            'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
            'ip': '10.223.197.140',
            'role': 'Ovsdpdk',
            'user': 'root',
            'vpath': '/usr/local/',
            'images': '/var/lib/libvirt/images/ubuntu1.img',
            'phy_driver': 'i40e',
            'password': 'password',
            'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
        self.test_context.get_nfvi_obj = mock.MagicMock()
        self.test_context.init(attrs)
        self.test_context.nfvi_obj.destroy_vm = mock.Mock()
        self.assertIsNone(self.test_context.undeploy())

    def test_get_nfvi_obj_sriov(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """get_nfvi_obj() returns an object when the context resolves to Sriov."""
        with mock.patch('yardstick.benchmark.contexts.standalone.sriov'):
            attrs = {
                'name': 'sriov',
                'file': self._get_file_abspath(self.NODES_SAMPLE)
            }
            self.test_context.init(attrs)
            self.test_context.nfvi_obj.file_path = self._get_file_abspath(
                self.NODES_SAMPLE)
            self.test_context.nfvi_node = [{
                'name': 'sriov',
                'vf_macs': ['00:00:00:71:7d:25', '00:00:00:71:7d:26'],
                'ip': '10.223.197.140',
                'role': 'Sriov',
                'user': 'root',
                'images': '/var/lib/libvirt/images/ubuntu1.img',
                'phy_driver': 'i40e',
                'password': 'intel123',
                'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
            self.test_context.get_nfvi_obj = mock.MagicMock()
            self.test_context.init(attrs)
            self.test_context.get_context_impl = mock.Mock(
                return_value=sriov.Sriov)
            self.assertIsNotNone(self.test_context.get_nfvi_obj())

    def test_get_nfvi_obj_ovs(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """get_nfvi_obj() returns an object when the context resolves to Ovsdpdk."""
        with mock.patch('yardstick.benchmark.contexts.standalone.ovsdpdk'):
            attrs = {
                'name': 'ovs',
                'file': self._get_file_abspath(self.NODES_SAMPLE_OVSDPDK)
            }
            self.test_context.init(attrs)
            # NOTE(review): file_path is reset to NODES_SAMPLE (not the
            # OVSDPDK sample) — presumably a copy/paste slip; confirm.
            self.test_context.nfvi_obj.file_path = self._get_file_abspath(
                self.NODES_SAMPLE)
            self.test_context.nfvi_node = [{
                'name': 'ovs',
                'vports_mac': ['00:00:00:00:00:03', '00:00:00:00:00:04'],
                'ip': '10.223.197.140',
                'role': 'Ovsdpdk',
                'user': 'root',
                'vpath': '/usr/local/',
                'images': '/var/lib/libvirt/images/ubuntu1.img',
                'phy_driver': 'i40e',
                'password': 'password',
                'phy_ports': ['0000:06:00.0', '0000:06:00.1']}]
            self.test_context.get_nfvi_obj = mock.MagicMock()
            self.test_context.init(attrs)
            self.test_context.get_context_impl = mock.Mock(
                return_value=ovsdpdk.Ovsdpdk)
            self.assertIsNotNone(self.test_context.get_nfvi_obj())

    def test_get_context_impl_correct_obj(self, mock_sriov_time, mock_standlalone_time,
                                          mock_ovsdpdk_time):
        """get_context_impl('Sriov') resolves to a class with modules stubbed."""
        with mock.patch.dict("sys.modules", MOCKS):
            self.assertIsNotNone(self.test_context.get_context_impl('Sriov'))

    def test_get_context_impl_wrong_obj(self, mock_sriov_time, mock_standlalone_time,
                                        mock_ovsdpdk_time):
        """get_context_impl raises ValueError for an unknown implementation name."""
        with mock.patch.dict("sys.modules", MOCKS):
            self.assertRaises(
                ValueError,
                lambda: self.test_context.get_context_impl('wrong_object'))

    def _get_file_abspath(self, filename):
        """Return the absolute path of a fixture file next to this test module."""
        curr_path = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(curr_path, filename)
        return file_path

    def test__get_network(self, mock_sriov_time, mock_standlalone_time, mock_ovsdpdk_time):
        """_get_network() lookups: None/empty/unknown inputs yield None;
        vld_id and direct-name lookups return the (normalized) network dict."""
        network1 = {
            'name': 'net_1',
            'vld_id': 'vld111',
            'segmentation_id': 'seg54',
            'network_type': 'type_a',
            'physical_network': 'phys',
        }
        network2 = {
            'name': 'net_2',
            'vld_id': 'vld999',
        }
        self.test_context.networks = {
            'a': network1,
            'b': network2,
        }
        attr_name = None
        self.assertIsNone(self.test_context._get_network(attr_name))
        attr_name = {}
        self.assertIsNone(self.test_context._get_network(attr_name))
        attr_name = {'vld_id': 'vld777'}
        self.assertIsNone(self.test_context._get_network(attr_name))
        attr_name = 'vld777'
        self.assertIsNone(self.test_context._get_network(attr_name))
        attr_name = {'vld_id': 'vld999'}
        # missing optional keys are filled in with None by _get_network
        expected = {
            "name": 'net_2',
            "vld_id": 'vld999',
            "segmentation_id": None,
            "network_type": None,
            "physical_network": None,
        }
        result = self.test_context._get_network(attr_name)
        self.assertDictEqual(result, expected)
        attr_name = 'a'
        expected = network1
        result = self.test_context._get_network(attr_name)
        self.assertDictEqual(result, expected)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 40.928258
| 99
| 0.559669
| 3,404
| 27,954
| 4.346357
| 0.071387
| 0.042176
| 0.134843
| 0.070632
| 0.879486
| 0.862386
| 0.834944
| 0.815275
| 0.79635
| 0.793309
| 0
| 0.081718
| 0.289511
| 27,954
| 682
| 100
| 40.98827
| 0.663209
| 0.024004
| 0
| 0.769103
| 0
| 0
| 0.237777
| 0.055859
| 0
| 0
| 0
| 0
| 0.068106
| 1
| 0.043189
| false
| 0.036545
| 0.009967
| 0
| 0.066445
| 0.006645
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b140d0680c2f0b19de85d7d847e8d3bc987f76b3
| 739
|
py
|
Python
|
evaluate_policy.py
|
apo88/MEIRL_EV_python2
|
bb6da0e3a6ab0c03162823a255bc7511766dce5b
|
[
"MIT"
] | null | null | null |
evaluate_policy.py
|
apo88/MEIRL_EV_python2
|
bb6da0e3a6ab0c03162823a255bc7511766dce5b
|
[
"MIT"
] | null | null | null |
evaluate_policy.py
|
apo88/MEIRL_EV_python2
|
bb6da0e3a6ab0c03162823a255bc7511766dce5b
|
[
"MIT"
] | null | null | null |
import numpy as np
def change_dir(policy):
for pre_s in range(100):
if(policy[pre_s]==0):
print pre_s,'r'
elif(policy[pre_s]==1):
print pre_s,'l'
elif(policy[pre_s]==2):
print pre_s,'d'
elif(policy[pre_s]==3):
print pre_s,'u'
else:
print pre_s,'s'
def max_dir(policy):
for pre_s in range(10*10):
if (np.argmax(policy[pre_s])==0):
print pre_s, 'r'
elif (np.argmax(policy[pre_s]==1)):
print pre_s, 'l'
elif (np.argmax(policy[pre_s]==2)):
print pre_s, 'd'
elif (np.argmax(policy[pre_s]==3)):
print pre_s, 'u'
else:
print pre_s, 's'
| 26.392857
| 43
| 0.488498
| 115
| 739
| 2.947826
| 0.252174
| 0.235988
| 0.265487
| 0.20059
| 0.879056
| 0.855457
| 0.784661
| 0.648968
| 0.648968
| 0.20649
| 0
| 0.031646
| 0.358593
| 739
| 28
| 44
| 26.392857
| 0.683544
| 0
| 0
| 0.48
| 0
| 0
| 0.013514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.04
| null | null | 0.4
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1518a28fb6731fe5240804b8d0bb83ccaea6917
| 112
|
py
|
Python
|
apps/production/filters/__init__.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
apps/production/filters/__init__.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
apps/production/filters/__init__.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
from apps.production.filters.basicinfor_filters import *
from apps.production.filters.recording_filters import *
| 56
| 56
| 0.866071
| 14
| 112
| 6.785714
| 0.5
| 0.168421
| 0.378947
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 112
| 2
| 57
| 56
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b17ceb025d4dbb3f4565060c515202710f158e96
| 34,087
|
py
|
Python
|
ursina/scripts/generate_normals.py
|
clayboone/ursina
|
9aebd9403b924af260fbefbfd7cef5ad82feeff7
|
[
"MIT"
] | 1
|
2020-09-04T14:32:33.000Z
|
2020-09-04T14:32:33.000Z
|
ursina/scripts/generate_normals.py
|
clayboone/ursina
|
9aebd9403b924af260fbefbfd7cef5ad82feeff7
|
[
"MIT"
] | null | null | null |
ursina/scripts/generate_normals.py
|
clayboone/ursina
|
9aebd9403b924af260fbefbfd7cef5ad82feeff7
|
[
"MIT"
] | 1
|
2020-09-04T14:32:41.000Z
|
2020-09-04T14:32:41.000Z
|
# import numpy
def normalize_v3(arr):
    """Scale every row of an (n, 3) numpy array to unit length, in place.

    Returns the same array object for call-chaining convenience.
    """
    import numpy
    row_lengths = numpy.sqrt(arr[:, 0] ** 2 + arr[:, 1] ** 2 + arr[:, 2] ** 2)
    for column in (0, 1, 2):
        arr[:, column] /= row_lengths
    return arr
def _normalize_rows(arr):
    """Scale every row of an (n, 3) array to unit length, in place; returns arr."""
    import numpy
    lens = numpy.sqrt(arr[:, 0] ** 2 + arr[:, 1] ** 2 + arr[:, 2] ** 2)
    arr[:, 0] /= lens
    arr[:, 1] /= lens
    arr[:, 2] /= lens
    return arr


def generate_normals(vertices, triangles=None, smooth=True):
    """Compute per-vertex normals for a triangle mesh.

    vertices  -- sequence of (x, y, z) points (float recommended: the normal
                 accumulator inherits the vertex dtype).
    triangles -- optional index data: flat ints, 3-tuples, or 4-tuples (quads
                 are split into two triangles).  When None/empty, every three
                 consecutive vertices form a triangle.
    smooth    -- when True, vertices sharing the same position get the mean of
                 their normals (note: the smoothed normals are not
                 re-normalized afterwards, matching the original behavior).

    Returns an (n, 3) numpy array of normals, one per vertex.

    BUGFIX 1: accumulation used ``normals[triangles[:, 0]] += n``; with
    repeated vertex indices numpy's buffered fancy indexing applies only the
    *last* contribution per index instead of summing them.  ``numpy.add.at``
    performs the intended unbuffered accumulation.
    BUGFIX 2: the smoothing average divided by a hard-coded 3 regardless of
    how many vertices actually share a position; it now divides by the real
    duplicate count.
    """
    import numpy
    if not triangles:
        # ungrouped vertex soup: every run of 3 consecutive vertices is a triangle
        new_tris = [(i, i + 1, i + 2) for i in range(0, len(vertices), 3)]
    else:
        # flatten the mixed index formats, then regroup in threes
        flat = list()
        for t in triangles:
            if isinstance(t, int):
                flat.append(t)
            elif len(t) == 3:
                flat.extend(t)
            elif len(t) == 4:
                # split the quad (a, b, c, d) into triangles (a, b, c) and (c, d, a)
                flat.extend((t[0], t[1], t[2], t[2], t[3], t[0]))
        new_tris = [(flat[i], flat[i + 1], flat[i + 2]) for i in range(0, len(flat), 3)]
    vertices = numpy.array(vertices)
    triangles = numpy.array(new_tris)
    normals = numpy.zeros(vertices.shape, dtype=vertices.dtype)
    # Indexed view into the vertex array: one (3, 3) block per triangle.
    tris = vertices[triangles]
    # Per-triangle normal: cross product of the edge vectors v1-v0 and v2-v0.
    n = numpy.cross(tris[::, 1] - tris[::, 0], tris[::, 2] - tris[::, 0])
    _normalize_rows(n)
    # Flip orientation (the original comment reads "inverse it, dunno why" —
    # presumably the engine's winding convention; kept as-is).
    n = numpy.negative(n)
    # Accumulate each triangle's normal into its three vertices (unbuffered,
    # so vertices shared by several triangles sum all contributions).
    numpy.add.at(normals, triangles[:, 0], n)
    numpy.add.at(normals, triangles[:, 1], n)
    numpy.add.at(normals, triangles[:, 2], n)
    _normalize_rows(normals)
    if smooth:
        # Average the normals of vertices that occupy the same position so
        # duplicated seam vertices shade smoothly.  O(n^2) duplicate scan,
        # as in the original.
        verts_list = vertices.tolist()
        visited = list()
        for i, v in enumerate(verts_list):
            if i in visited:
                continue
            overlapping = [j for j, w in enumerate(verts_list) if w == v]
            visited.extend(overlapping)
            average_normal = sum(normals[e] for e in overlapping) / len(overlapping)
            for idx in overlapping:
                normals[idx] = average_normal
    return normals
if __name__ == '__main__':
vertices=((-0.0, -0.5, 0.0), (0.10159, -0.483975, -0.073809), (-0.038803, -0.483975, -0.119426), (0.361804, -0.22361, -0.262863), (0.304773, -0.328759, -0.221428), (0.406365, -0.25115, -0.147619), (-0.0, -0.5, 0.0), (-0.038803, -0.483975, -0.119426), (-0.125573, -0.483974, 0.0), (-0.0, -0.5, 0.0), (-0.125573, -0.483974, 0.0), (-0.038803, -0.483975, 0.119426), (-0.0, -0.5, 0.0), (-0.038803, -0.483975, 0.119426), (0.10159, -0.483975, 0.073809), (0.361804, -0.22361, -0.262863), (0.406365, -0.25115, -0.147619), (0.430349, -0.125575, -0.221429), (-0.138194, -0.22361, -0.425325), (-0.01482, -0.251151, -0.432092), (-0.077608, -0.125576, -0.477711), (-0.447213, -0.223608, 0.0), (-0.415525, -0.251149, -0.119427), (-0.478313, -0.125575, -0.073809), (-0.138194, -0.22361, 0.425325), (-0.241986, -0.251151, 0.358282), (-0.218003, -0.125576, 0.432094), (0.361804, -0.22361, 0.262863), (0.26597, -0.251151, 0.340856), (0.343579, -0.125576, 0.340858), (0.361804, -0.22361, -0.262863), (0.430349, -0.125575, -0.221429), (0.343579, -0.125576, -0.340858), (-0.138194, -0.22361, -0.425325), (-0.077608, -0.125576, -0.477711), (-0.218003, -0.125576, -0.432094), (-0.447213, -0.223608, 0.0), (-0.478313, -0.125575, -0.073809), (-0.478313, -0.125575, 0.073809), (-0.138194, -0.22361, 0.425325), (-0.218003, -0.125576, 0.432094), (-0.077608, -0.125576, 0.477711), (0.361804, -0.22361, 0.262863), (0.343579, -0.125576, 0.340858), (0.430349, -0.125575, 0.221429), (0.138194, 0.22361, -0.425325), (0.241986, 0.251151, -0.358282), (0.116411, 0.32876, -0.358282), (-0.361804, 0.22361, -0.262863), (-0.26597, 0.251151, -0.340856), (-0.304773, 0.328759, -0.221428), (-0.361804, 0.22361, 0.262863), (-0.406365, 0.25115, 0.147619), (-0.304773, 0.328759, 0.221428), (0.138194, 0.22361, 0.425325), (0.01482, 0.251151, 0.432092), (0.116411, 0.32876, 0.358282), (0.447213, 0.223608, 0.0), (0.415525, 0.251149, 0.119427), (0.376721, 0.328757, 0.0), (0.125573, 0.483974, 0.0), (0.038803, 0.483975, 0.119426), (-0.0, 0.5, 0.0), 
(0.262865, 0.425326, 0.0), (0.1809, 0.447215, 0.131431), (0.125573, 0.483974, 0.0), (0.376721, 0.328757, 0.0), (0.319097, 0.361805, 0.131432), (0.262865, 0.425326, 0.0), (0.125573, 0.483974, 0.0), (0.1809, 0.447215, 0.131431), (0.038803, 0.483975, 0.119426), (0.1809, 0.447215, 0.131431), (0.081228, 0.425327, 0.249998), (0.038803, 0.483975, 0.119426), (0.262865, 0.425326, 0.0), (0.319097, 0.361805, 0.131432), (0.1809, 0.447215, 0.131431), (0.319097, 0.361805, 0.131432), (0.223605, 0.361806, 0.262864), (0.1809, 0.447215, 0.131431), (0.1809, 0.447215, 0.131431), (0.223605, 0.361806, 0.262864), (0.081228, 0.425327, 0.249998), (0.223605, 0.361806, 0.262864), (0.116411, 0.32876, 0.358282), (0.081228, 0.425327, 0.249998), (0.376721, 0.328757, 0.0), (0.415525, 0.251149, 0.119427), (0.319097, 0.361805, 0.131432), (0.415525, 0.251149, 0.119427), (0.344095, 0.262868, 0.249998), (0.319097, 0.361805, 0.131432), (0.319097, 0.361805, 0.131432), (0.344095, 0.262868, 0.249998), (0.223605, 0.361806, 0.262864), (0.344095, 0.262868, 0.249998), (0.241986, 0.251151, 0.358282), (0.223605, 0.361806, 0.262864), (0.223605, 0.361806, 0.262864), (0.241986, 0.251151, 0.358282), (0.116411, 0.32876, 0.358282), (0.241986, 0.251151, 0.358282), (0.138194, 0.22361, 0.425325), (0.116411, 0.32876, 0.358282), (0.038803, 0.483975, 0.119426), (-0.10159, 0.483975, 0.073809), (-0.0, 0.5, 0.0), (0.081228, 0.425327, 0.249998), (-0.069099, 0.447215, 0.21266), (0.038803, 0.483975, 0.119426), (0.116411, 0.32876, 0.358282), (-0.026395, 0.361806, 0.344092), (0.081228, 0.425327, 0.249998), (0.038803, 0.483975, 0.119426), (-0.069099, 0.447215, 0.21266), (-0.10159, 0.483975, 0.073809), (-0.069099, 0.447215, 0.21266), (-0.212661, 0.425327, 0.154506), (-0.10159, 0.483975, 0.073809), (0.081228, 0.425327, 0.249998), (-0.026395, 0.361806, 0.344092), (-0.069099, 0.447215, 0.21266), (-0.026395, 0.361806, 0.344092), (-0.180902, 0.361806, 0.293889), (-0.069099, 0.447215, 0.21266), (-0.069099, 0.447215, 0.21266), (-0.180902, 
0.361806, 0.293889), (-0.212661, 0.425327, 0.154506), (-0.180902, 0.361806, 0.293889), (-0.304773, 0.328759, 0.221428), (-0.212661, 0.425327, 0.154506), (0.116411, 0.32876, 0.358282), (0.01482, 0.251151, 0.432092), (-0.026395, 0.361806, 0.344092), (0.01482, 0.251151, 0.432092), (-0.131434, 0.262869, 0.404506), (-0.026395, 0.361806, 0.344092), (-0.026395, 0.361806, 0.344092), (-0.131434, 0.262869, 0.404506), (-0.180902, 0.361806, 0.293889), (-0.131434, 0.262869, 0.404506), (-0.26597, 0.251151, 0.340856), (-0.180902, 0.361806, 0.293889), (-0.180902, 0.361806, 0.293889), (-0.26597, 0.251151, 0.340856), (-0.304773, 0.328759, 0.221428), (-0.26597, 0.251151, 0.340856), (-0.361804, 0.22361, 0.262863), (-0.304773, 0.328759, 0.221428), (-0.10159, 0.483975, 0.073809), (-0.10159, 0.483975, -0.073809), (-0.0, 0.5, 0.0), (-0.212661, 0.425327, 0.154506), (-0.223605, 0.447215, 0.0), (-0.10159, 0.483975, 0.073809), (-0.304773, 0.328759, 0.221428), (-0.335408, 0.361805, 0.081229), (-0.212661, 0.425327, 0.154506), (-0.10159, 0.483975, 0.073809), (-0.223605, 0.447215, 0.0), (-0.10159, 0.483975, -0.073809), (-0.223605, 0.447215, 0.0), (-0.212661, 0.425327, -0.154506), (-0.10159, 0.483975, -0.073809), (-0.212661, 0.425327, 0.154506), (-0.335408, 0.361805, 0.081229), (-0.223605, 0.447215, 0.0), (-0.335408, 0.361805, 0.081229), (-0.335408, 0.361805, -0.081229), (-0.223605, 0.447215, 0.0), (-0.223605, 0.447215, 0.0), (-0.335408, 0.361805, -0.081229), (-0.212661, 0.425327, -0.154506), (-0.335408, 0.361805, -0.081229), (-0.304773, 0.328759, -0.221428), (-0.212661, 0.425327, -0.154506), (-0.304773, 0.328759, 0.221428), (-0.406365, 0.25115, 0.147619), (-0.335408, 0.361805, 0.081229), (-0.406365, 0.25115, 0.147619), (-0.425324, 0.262868, 0.0), (-0.335408, 0.361805, 0.081229), (-0.335408, 0.361805, 0.081229), (-0.425324, 0.262868, 0.0), (-0.335408, 0.361805, -0.081229), (-0.425324, 0.262868, 0.0), (-0.406365, 0.25115, -0.147619), (-0.335408, 0.361805, -0.081229), (-0.335408, 0.361805, 
-0.081229), (-0.406365, 0.25115, -0.147619), (-0.304773, 0.328759, -0.221428), (-0.406365, 0.25115, -0.147619), (-0.361804, 0.22361, -0.262863), (-0.304773, 0.328759, -0.221428), (-0.10159, 0.483975, -0.073809), (0.038803, 0.483975, -0.119426), (-0.0, 0.5, 0.0), (-0.212661, 0.425327, -0.154506), (-0.069099, 0.447215, -0.21266), (-0.10159, 0.483975, -0.073809), (-0.304773, 0.328759, -0.221428), (-0.180902, 0.361806, -0.293889), (-0.212661, 0.425327, -0.154506), (-0.10159, 0.483975, -0.073809), (-0.069099, 0.447215, -0.21266), (0.038803, 0.483975, -0.119426), (-0.069099, 0.447215, -0.21266), (0.081228, 0.425327, -0.249998), (0.038803, 0.483975, -0.119426), (-0.212661, 0.425327, -0.154506), (-0.180902, 0.361806, -0.293889), (-0.069099, 0.447215, -0.21266), (-0.180902, 0.361806, -0.293889), (-0.026395, 0.361806, -0.344092), (-0.069099, 0.447215, -0.21266), (-0.069099, 0.447215, -0.21266), (-0.026395, 0.361806, -0.344092), (0.081228, 0.425327, -0.249998), (-0.026395, 0.361806, -0.344092), (0.116411, 0.32876, -0.358282), (0.081228, 0.425327, -0.249998), (-0.304773, 0.328759, -0.221428), (-0.26597, 0.251151, -0.340856), (-0.180902, 0.361806, -0.293889), (-0.26597, 0.251151, -0.340856), (-0.131434, 0.262869, -0.404506), (-0.180902, 0.361806, -0.293889), (-0.180902, 0.361806, -0.293889), (-0.131434, 0.262869, -0.404506), (-0.026395, 0.361806, -0.344092), (-0.131434, 0.262869, -0.404506), (0.01482, 0.251151, -0.432092), (-0.026395, 0.361806, -0.344092), (-0.026395, 0.361806, -0.344092), (0.01482, 0.251151, -0.432092), (0.116411, 0.32876, -0.358282), (0.01482, 0.251151, -0.432092), (0.138194, 0.22361, -0.425325), (0.116411, 0.32876, -0.358282), (0.038803, 0.483975, -0.119426), (0.125573, 0.483974, 0.0), (-0.0, 0.5, 0.0), (0.081228, 0.425327, -0.249998), (0.1809, 0.447215, -0.131431), (0.038803, 0.483975, -0.119426), (0.116411, 0.32876, -0.358282), (0.223605, 0.361806, -0.262864), (0.081228, 0.425327, -0.249998), (0.038803, 0.483975, -0.119426), (0.1809, 0.447215, -0.131431), 
(0.125573, 0.483974, 0.0), (0.1809, 0.447215, -0.131431), (0.262865, 0.425326, 0.0), (0.125573, 0.483974, 0.0), (0.081228, 0.425327, -0.249998), (0.223605, 0.361806, -0.262864), (0.1809, 0.447215, -0.131431), (0.223605, 0.361806, -0.262864), (0.319097, 0.361805, -0.131432), (0.1809, 0.447215, -0.131431), (0.1809, 0.447215, -0.131431), (0.319097, 0.361805, -0.131432), (0.262865, 0.425326, 0.0), (0.319097, 0.361805, -0.131432), (0.376721, 0.328757, 0.0), (0.262865, 0.425326, 0.0), (0.116411, 0.32876, -0.358282), (0.241986, 0.251151, -0.358282), (0.223605, 0.361806, -0.262864), (0.241986, 0.251151, -0.358282), (0.344095, 0.262868, -0.249998), (0.223605, 0.361806, -0.262864), (0.223605, 0.361806, -0.262864), (0.344095, 0.262868, -0.249998), (0.319097, 0.361805, -0.131432), (0.344095, 0.262868, -0.249998), (0.415525, 0.251149, -0.119427), (0.319097, 0.361805, -0.131432), (0.319097, 0.361805, -0.131432), (0.415525, 0.251149, -0.119427), (0.376721, 0.328757, 0.0), (0.415525, 0.251149, -0.119427), (0.447213, 0.223608, 0.0), (0.376721, 0.328757, 0.0), (0.478313, 0.125575, 0.073809), (0.415525, 0.251149, 0.119427), (0.447213, 0.223608, 0.0), (0.475529, 0.0, 0.154506), (0.430902, 0.138198, 0.212661), (0.478313, 0.125575, 0.073809), (0.430349, -0.125575, 0.221429), (0.40451, 0.0, 0.293891), (0.475529, 0.0, 0.154506), (0.478313, 0.125575, 0.073809), (0.430902, 0.138198, 0.212661), (0.415525, 0.251149, 0.119427), (0.430902, 0.138198, 0.212661), (0.344095, 0.262868, 0.249998), (0.415525, 0.251149, 0.119427), (0.475529, 0.0, 0.154506), (0.40451, 0.0, 0.293891), (0.430902, 0.138198, 0.212661), (0.40451, 0.0, 0.293891), (0.33541, 0.138199, 0.344095), (0.430902, 0.138198, 0.212661), (0.430902, 0.138198, 0.212661), (0.33541, 0.138199, 0.344095), (0.344095, 0.262868, 0.249998), (0.33541, 0.138199, 0.344095), (0.241986, 0.251151, 0.358282), (0.344095, 0.262868, 0.249998), (0.430349, -0.125575, 0.221429), (0.343579, -0.125576, 0.340858), (0.40451, 0.0, 0.293891), (0.343579, -0.125576, 
0.340858), (0.293893, -0.0, 0.404508), (0.40451, 0.0, 0.293891), (0.40451, 0.0, 0.293891), (0.293893, -0.0, 0.404508), (0.33541, 0.138199, 0.344095), (0.293893, -0.0, 0.404508), (0.218003, 0.125576, 0.432094), (0.33541, 0.138199, 0.344095), (0.33541, 0.138199, 0.344095), (0.218003, 0.125576, 0.432094), (0.241986, 0.251151, 0.358282), (0.218003, 0.125576, 0.432094), (0.138194, 0.22361, 0.425325), (0.241986, 0.251151, 0.358282), (0.077608, 0.125576, 0.477711), (0.01482, 0.251151, 0.432092), (0.138194, 0.22361, 0.425325), (-0.0, 0.0, 0.5), (-0.069099, 0.138199, 0.475528), (0.077608, 0.125576, 0.477711), (-0.077608, -0.125576, 0.477711), (-0.154508, -0.0, 0.475528), (-0.0, 0.0, 0.5), (0.077608, 0.125576, 0.477711), (-0.069099, 0.138199, 0.475528), (0.01482, 0.251151, 0.432092), (-0.069099, 0.138199, 0.475528), (-0.131434, 0.262869, 0.404506), (0.01482, 0.251151, 0.432092), (-0.0, 0.0, 0.5), (-0.154508, -0.0, 0.475528), (-0.069099, 0.138199, 0.475528), (-0.154508, -0.0, 0.475528), (-0.223608, 0.138199, 0.425324), (-0.069099, 0.138199, 0.475528), (-0.069099, 0.138199, 0.475528), (-0.223608, 0.138199, 0.425324), (-0.131434, 0.262869, 0.404506), (-0.223608, 0.138199, 0.425324), (-0.26597, 0.251151, 0.340856), (-0.131434, 0.262869, 0.404506), (-0.077608, -0.125576, 0.477711), (-0.218003, -0.125576, 0.432094), (-0.154508, -0.0, 0.475528), (-0.218003, -0.125576, 0.432094), (-0.293893, -0.0, 0.404508), (-0.154508, -0.0, 0.475528), (-0.154508, -0.0, 0.475528), (-0.293893, -0.0, 0.404508), (-0.223608, 0.138199, 0.425324), (-0.293893, -0.0, 0.404508), (-0.343579, 0.125576, 0.340858), (-0.223608, 0.138199, 0.425324), (-0.223608, 0.138199, 0.425324), (-0.343579, 0.125576, 0.340858), (-0.26597, 0.251151, 0.340856), (-0.343579, 0.125576, 0.340858), (-0.361804, 0.22361, 0.262863), (-0.26597, 0.251151, 0.340856), (-0.430349, 0.125575, 0.221429), (-0.406365, 0.25115, 0.147619), (-0.361804, 0.22361, 0.262863), (-0.475529, 0.0, 0.154506), (-0.473607, 0.138198, 0.081229), (-0.430349, 
0.125575, 0.221429), (-0.478313, -0.125575, 0.073809), (-0.5, 0.0, -0.0), (-0.475529, 0.0, 0.154506), (-0.430349, 0.125575, 0.221429), (-0.473607, 0.138198, 0.081229), (-0.406365, 0.25115, 0.147619), (-0.473607, 0.138198, 0.081229), (-0.425324, 0.262868, 0.0), (-0.406365, 0.25115, 0.147619), (-0.475529, 0.0, 0.154506), (-0.5, 0.0, -0.0), (-0.473607, 0.138198, 0.081229), (-0.5, 0.0, -0.0), (-0.473606, 0.138198, -0.081229), (-0.473607, 0.138198, 0.081229), (-0.473607, 0.138198, 0.081229), (-0.473606, 0.138198, -0.081229), (-0.425324, 0.262868, 0.0), (-0.473606, 0.138198, -0.081229), (-0.406365, 0.25115, -0.147619), (-0.425324, 0.262868, 0.0), (-0.478313, -0.125575, 0.073809), (-0.478313, -0.125575, -0.073809), (-0.5, 0.0, -0.0), (-0.478313, -0.125575, -0.073809), (-0.475529, -0.0, -0.154506), (-0.5, 0.0, -0.0), (-0.5, 0.0, -0.0), (-0.475529, -0.0, -0.154506), (-0.473606, 0.138198, -0.081229), (-0.475529, -0.0, -0.154506), (-0.430349, 0.125575, -0.221429), (-0.473606, 0.138198, -0.081229), (-0.473606, 0.138198, -0.081229), (-0.430349, 0.125575, -0.221429), (-0.406365, 0.25115, -0.147619), (-0.430349, 0.125575, -0.221429), (-0.361804, 0.22361, -0.262863), (-0.406365, 0.25115, -0.147619), (-0.343579, 0.125576, -0.340858), (-0.26597, 0.251151, -0.340856), (-0.361804, 0.22361, -0.262863), (-0.293893, 0.0, -0.404508), (-0.223608, 0.138198, -0.425324), (-0.343579, 0.125576, -0.340858), (-0.218003, -0.125576, -0.432094), (-0.154509, -0.0, -0.475528), (-0.293893, 0.0, -0.404508), (-0.343579, 0.125576, -0.340858), (-0.223608, 0.138198, -0.425324), (-0.26597, 0.251151, -0.340856), (-0.223608, 0.138198, -0.425324), (-0.131434, 0.262869, -0.404506), (-0.26597, 0.251151, -0.340856), (-0.293893, 0.0, -0.404508), (-0.154509, -0.0, -0.475528), (-0.223608, 0.138198, -0.425324), (-0.154509, -0.0, -0.475528), (-0.0691, 0.138198, -0.475528), (-0.223608, 0.138198, -0.425324), (-0.223608, 0.138198, -0.425324), (-0.0691, 0.138198, -0.475528), (-0.131434, 0.262869, -0.404506), (-0.0691, 
0.138198, -0.475528), (0.01482, 0.251151, -0.432092), (-0.131434, 0.262869, -0.404506), (-0.218003, -0.125576, -0.432094), (-0.077608, -0.125576, -0.477711), (-0.154509, -0.0, -0.475528), (-0.077608, -0.125576, -0.477711), (-0.0, -0.0, -0.5), (-0.154509, -0.0, -0.475528), (-0.154509, -0.0, -0.475528), (-0.0, -0.0, -0.5), (-0.0691, 0.138198, -0.475528), (-0.0, -0.0, -0.5), (0.077608, 0.125576, -0.477711), (-0.0691, 0.138198, -0.475528), (-0.0691, 0.138198, -0.475528), (0.077608, 0.125576, -0.477711), (0.01482, 0.251151, -0.432092), (0.077608, 0.125576, -0.477711), (0.138194, 0.22361, -0.425325), (0.01482, 0.251151, -0.432092), (0.218003, 0.125576, -0.432094), (0.241986, 0.251151, -0.358282), (0.138194, 0.22361, -0.425325), (0.293893, 0.0, -0.404508), (0.33541, 0.138198, -0.344095), (0.218003, 0.125576, -0.432094), (0.343579, -0.125576, -0.340858), (0.404509, -1e-06, -0.293891), (0.293893, 0.0, -0.404508), (0.218003, 0.125576, -0.432094), (0.33541, 0.138198, -0.344095), (0.241986, 0.251151, -0.358282), (0.33541, 0.138198, -0.344095), (0.344095, 0.262868, -0.249998), (0.241986, 0.251151, -0.358282), (0.293893, 0.0, -0.404508), (0.404509, -1e-06, -0.293891), (0.33541, 0.138198, -0.344095), (0.404509, -1e-06, -0.293891), (0.430902, 0.138197, -0.212662), (0.33541, 0.138198, -0.344095), (0.33541, 0.138198, -0.344095), (0.430902, 0.138197, -0.212662), (0.344095, 0.262868, -0.249998), (0.430902, 0.138197, -0.212662), (0.415525, 0.251149, -0.119427), (0.344095, 0.262868, -0.249998), (0.343579, -0.125576, -0.340858), (0.430349, -0.125575, -0.221429), (0.404509, -1e-06, -0.293891), (0.430349, -0.125575, -0.221429), (0.475529, -0.0, -0.154506), (0.404509, -1e-06, -0.293891), (0.404509, -1e-06, -0.293891), (0.475529, -0.0, -0.154506), (0.430902, 0.138197, -0.212662), (0.475529, -0.0, -0.154506), (0.478313, 0.125575, -0.073809), (0.430902, 0.138197, -0.212662), (0.430902, 0.138197, -0.212662), (0.478313, 0.125575, -0.073809), (0.415525, 0.251149, -0.119427), (0.478313, 0.125575, 
-0.073809), (0.447213, 0.223608, 0.0), (0.415525, 0.251149, -0.119427), (0.218003, 0.125576, 0.432094), (0.077608, 0.125576, 0.477711), (0.138194, 0.22361, 0.425325), (0.293893, -0.0, 0.404508), (0.154509, -0.0, 0.475528), (0.218003, 0.125576, 0.432094), (0.343579, -0.125576, 0.340858), (0.223608, -0.138199, 0.425324), (0.293893, -0.0, 0.404508), (0.218003, 0.125576, 0.432094), (0.154509, -0.0, 0.475528), (0.077608, 0.125576, 0.477711), (0.154509, -0.0, 0.475528), (-0.0, 0.0, 0.5), (0.077608, 0.125576, 0.477711), (0.293893, -0.0, 0.404508), (0.223608, -0.138199, 0.425324), (0.154509, -0.0, 0.475528), (0.223608, -0.138199, 0.425324), (0.0691, -0.138199, 0.475527), (0.154509, -0.0, 0.475528), (0.154509, -0.0, 0.475528), (0.0691, -0.138199, 0.475527), (-0.0, 0.0, 0.5), (0.0691, -0.138199, 0.475527), (-0.077608, -0.125576, 0.477711), (-0.0, 0.0, 0.5), (0.343579, -0.125576, 0.340858), (0.26597, -0.251151, 0.340856), (0.223608, -0.138199, 0.425324), (0.26597, -0.251151, 0.340856), (0.131434, -0.262869, 0.404506), (0.223608, -0.138199, 0.425324), (0.223608, -0.138199, 0.425324), (0.131434, -0.262869, 0.404506), (0.0691, -0.138199, 0.475527), (0.131434, -0.262869, 0.404506), (-0.01482, -0.251151, 0.432092), (0.0691, -0.138199, 0.475527), (0.0691, -0.138199, 0.475527), (-0.01482, -0.251151, 0.432092), (-0.077608, -0.125576, 0.477711), (-0.01482, -0.251151, 0.432092), (-0.138194, -0.22361, 0.425325), (-0.077608, -0.125576, 0.477711), (-0.343579, 0.125576, 0.340858), (-0.430349, 0.125575, 0.221429), (-0.361804, 0.22361, 0.262863), (-0.293893, -0.0, 0.404508), (-0.404509, -0.0, 0.293892), (-0.343579, 0.125576, 0.340858), (-0.218003, -0.125576, 0.432094), (-0.335409, -0.138199, 0.344095), (-0.293893, -0.0, 0.404508), (-0.343579, 0.125576, 0.340858), (-0.404509, -0.0, 0.293892), (-0.430349, 0.125575, 0.221429), (-0.404509, -0.0, 0.293892), (-0.475529, 0.0, 0.154506), (-0.430349, 0.125575, 0.221429), (-0.293893, -0.0, 0.404508), (-0.335409, -0.138199, 0.344095), (-0.404509, -0.0, 
0.293892), (-0.335409, -0.138199, 0.344095), (-0.430902, -0.138198, 0.212662), (-0.404509, -0.0, 0.293892), (-0.404509, -0.0, 0.293892), (-0.430902, -0.138198, 0.212662), (-0.475529, 0.0, 0.154506), (-0.430902, -0.138198, 0.212662), (-0.478313, -0.125575, 0.073809), (-0.475529, 0.0, 0.154506), (-0.218003, -0.125576, 0.432094), (-0.241986, -0.251151, 0.358282), (-0.335409, -0.138199, 0.344095), (-0.241986, -0.251151, 0.358282), (-0.344095, -0.262868, 0.249998), (-0.335409, -0.138199, 0.344095), (-0.335409, -0.138199, 0.344095), (-0.344095, -0.262868, 0.249998), (-0.430902, -0.138198, 0.212662), (-0.344095, -0.262868, 0.249998), (-0.415525, -0.251149, 0.119427), (-0.430902, -0.138198, 0.212662), (-0.430902, -0.138198, 0.212662), (-0.415525, -0.251149, 0.119427), (-0.478313, -0.125575, 0.073809), (-0.415525, -0.251149, 0.119427), (-0.447213, -0.223608, 0.0), (-0.478313, -0.125575, 0.073809), (-0.430349, 0.125575, -0.221429), (-0.343579, 0.125576, -0.340858), (-0.361804, 0.22361, -0.262863), (-0.475529, -0.0, -0.154506), (-0.404509, 0.0, -0.293892), (-0.430349, 0.125575, -0.221429), (-0.478313, -0.125575, -0.073809), (-0.430902, -0.138198, -0.212662), (-0.475529, -0.0, -0.154506), (-0.430349, 0.125575, -0.221429), (-0.404509, 0.0, -0.293892), (-0.343579, 0.125576, -0.340858), (-0.404509, 0.0, -0.293892), (-0.293893, 0.0, -0.404508), (-0.343579, 0.125576, -0.340858), (-0.475529, -0.0, -0.154506), (-0.430902, -0.138198, -0.212662), (-0.404509, 0.0, -0.293892), (-0.430902, -0.138198, -0.212662), (-0.33541, -0.138199, -0.344095), (-0.404509, 0.0, -0.293892), (-0.404509, 0.0, -0.293892), (-0.33541, -0.138199, -0.344095), (-0.293893, 0.0, -0.404508), (-0.33541, -0.138199, -0.344095), (-0.218003, -0.125576, -0.432094), (-0.293893, 0.0, -0.404508), (-0.478313, -0.125575, -0.073809), (-0.415525, -0.251149, -0.119427), (-0.430902, -0.138198, -0.212662), (-0.415525, -0.251149, -0.119427), (-0.344095, -0.262868, -0.249998), (-0.430902, -0.138198, -0.212662), (-0.430902, -0.138198, 
-0.212662), (-0.344095, -0.262868, -0.249998), (-0.33541, -0.138199, -0.344095), (-0.344095, -0.262868, -0.249998), (-0.241986, -0.251151, -0.358282), (-0.33541, -0.138199, -0.344095), (-0.33541, -0.138199, -0.344095), (-0.241986, -0.251151, -0.358282), (-0.218003, -0.125576, -0.432094), (-0.241986, -0.251151, -0.358282), (-0.138194, -0.22361, -0.425325), (-0.218003, -0.125576, -0.432094), (0.077608, 0.125576, -0.477711), (0.218003, 0.125576, -0.432094), (0.138194, 0.22361, -0.425325), (-0.0, -0.0, -0.5), (0.154509, 0.0, -0.475528), (0.077608, 0.125576, -0.477711), (-0.077608, -0.125576, -0.477711), (0.0691, -0.138199, -0.475527), (-0.0, -0.0, -0.5), (0.077608, 0.125576, -0.477711), (0.154509, 0.0, -0.475528), (0.218003, 0.125576, -0.432094), (0.154509, 0.0, -0.475528), (0.293893, 0.0, -0.404508), (0.218003, 0.125576, -0.432094), (-0.0, -0.0, -0.5), (0.0691, -0.138199, -0.475527), (0.154509, 0.0, -0.475528), (0.0691, -0.138199, -0.475527), (0.223608, -0.138199, -0.425324), (0.154509, 0.0, -0.475528), (0.154509, 0.0, -0.475528), (0.223608, -0.138199, -0.425324), (0.293893, 0.0, -0.404508), (0.223608, -0.138199, -0.425324), (0.343579, -0.125576, -0.340858), (0.293893, 0.0, -0.404508), (-0.077608, -0.125576, -0.477711), (-0.01482, -0.251151, -0.432092), (0.0691, -0.138199, -0.475527), (-0.01482, -0.251151, -0.432092), (0.131434, -0.262869, -0.404506), (0.0691, -0.138199, -0.475527), (0.0691, -0.138199, -0.475527), (0.131434, -0.262869, -0.404506), (0.223608, -0.138199, -0.425324), (0.131434, -0.262869, -0.404506), (0.26597, -0.251151, -0.340856), (0.223608, -0.138199, -0.425324), (0.223608, -0.138199, -0.425324), (0.26597, -0.251151, -0.340856), (0.343579, -0.125576, -0.340858), (0.26597, -0.251151, -0.340856), (0.361804, -0.22361, -0.262863), (0.343579, -0.125576, -0.340858), (0.478313, 0.125575, -0.073809), (0.478313, 0.125575, 0.073809), (0.447213, 0.223608, 0.0), (0.475529, -0.0, -0.154506), (0.5, 0.0, 0.0), (0.478313, 0.125575, -0.073809), (0.430349, -0.125575, 
-0.221429), (0.473607, -0.138198, -0.081229), (0.475529, -0.0, -0.154506), (0.478313, 0.125575, -0.073809), (0.5, 0.0, 0.0), (0.478313, 0.125575, 0.073809), (0.5, 0.0, 0.0), (0.475529, 0.0, 0.154506), (0.478313, 0.125575, 0.073809), (0.475529, -0.0, -0.154506), (0.473607, -0.138198, -0.081229), (0.5, 0.0, 0.0), (0.473607, -0.138198, -0.081229), (0.473607, -0.138198, 0.081229), (0.5, 0.0, 0.0), (0.5, 0.0, 0.0), (0.473607, -0.138198, 0.081229), (0.475529, 0.0, 0.154506), (0.473607, -0.138198, 0.081229), (0.430349, -0.125575, 0.221429), (0.475529, 0.0, 0.154506), (0.430349, -0.125575, -0.221429), (0.406365, -0.25115, -0.147619), (0.473607, -0.138198, -0.081229), (0.406365, -0.25115, -0.147619), (0.425324, -0.262868, 0.0), (0.473607, -0.138198, -0.081229), (0.473607, -0.138198, -0.081229), (0.425324, -0.262868, 0.0), (0.473607, -0.138198, 0.081229), (0.425324, -0.262868, 0.0), (0.406365, -0.25115, 0.147619), (0.473607, -0.138198, 0.081229), (0.473607, -0.138198, 0.081229), (0.406365, -0.25115, 0.147619), (0.430349, -0.125575, 0.221429), (0.406365, -0.25115, 0.147619), (0.361804, -0.22361, 0.262863), (0.430349, -0.125575, 0.221429), (0.304773, -0.328759, 0.221428), (0.26597, -0.251151, 0.340856), (0.361804, -0.22361, 0.262863), (0.212661, -0.425327, 0.154506), (0.180902, -0.361806, 0.29389), (0.304773, -0.328759, 0.221428), (0.10159, -0.483975, 0.073809), (0.069098, -0.447215, 0.212661), (0.212661, -0.425327, 0.154506), (0.304773, -0.328759, 0.221428), (0.180902, -0.361806, 0.29389), (0.26597, -0.251151, 0.340856), (0.180902, -0.361806, 0.29389), (0.131434, -0.262869, 0.404506), (0.26597, -0.251151, 0.340856), (0.212661, -0.425327, 0.154506), (0.069098, -0.447215, 0.212661), (0.180902, -0.361806, 0.29389), (0.069098, -0.447215, 0.212661), (0.026395, -0.361805, 0.344093), (0.180902, -0.361806, 0.29389), (0.180902, -0.361806, 0.29389), (0.026395, -0.361805, 0.344093), (0.131434, -0.262869, 0.404506), (0.026395, -0.361805, 0.344093), (-0.01482, -0.251151, 0.432092), 
(0.131434, -0.262869, 0.404506), (0.10159, -0.483975, 0.073809), (-0.038803, -0.483975, 0.119426), (0.069098, -0.447215, 0.212661), (-0.038803, -0.483975, 0.119426), (-0.081228, -0.425327, 0.249998), (0.069098, -0.447215, 0.212661), (0.069098, -0.447215, 0.212661), (-0.081228, -0.425327, 0.249998), (0.026395, -0.361805, 0.344093), (-0.081228, -0.425327, 0.249998), (-0.116411, -0.32876, 0.358282), (0.026395, -0.361805, 0.344093), (0.026395, -0.361805, 0.344093), (-0.116411, -0.32876, 0.358282), (-0.01482, -0.251151, 0.432092), (-0.116411, -0.32876, 0.358282), (-0.138194, -0.22361, 0.425325), (-0.01482, -0.251151, 0.432092), (-0.116411, -0.32876, 0.358282), (-0.241986, -0.251151, 0.358282), (-0.138194, -0.22361, 0.425325), (-0.081228, -0.425327, 0.249998), (-0.223605, -0.361806, 0.262864), (-0.116411, -0.32876, 0.358282), (-0.038803, -0.483975, 0.119426), (-0.180901, -0.447214, 0.131431), (-0.081228, -0.425327, 0.249998), (-0.116411, -0.32876, 0.358282), (-0.223605, -0.361806, 0.262864), (-0.241986, -0.251151, 0.358282), (-0.223605, -0.361806, 0.262864), (-0.344095, -0.262868, 0.249998), (-0.241986, -0.251151, 0.358282), (-0.081228, -0.425327, 0.249998), (-0.180901, -0.447214, 0.131431), (-0.223605, -0.361806, 0.262864), (-0.180901, -0.447214, 0.131431), (-0.319097, -0.361805, 0.131431), (-0.223605, -0.361806, 0.262864), (-0.223605, -0.361806, 0.262864), (-0.319097, -0.361805, 0.131431), (-0.344095, -0.262868, 0.249998), (-0.319097, -0.361805, 0.131431), (-0.415525, -0.251149, 0.119427), (-0.344095, -0.262868, 0.249998), (-0.038803, -0.483975, 0.119426), (-0.125573, -0.483974, 0.0), (-0.180901, -0.447214, 0.131431), (-0.125573, -0.483974, 0.0), (-0.262865, -0.425326, 0.0), (-0.180901, -0.447214, 0.131431), (-0.180901, -0.447214, 0.131431), (-0.262865, -0.425326, 0.0), (-0.319097, -0.361805, 0.131431), (-0.262865, -0.425326, 0.0), (-0.376721, -0.328757, 0.0), (-0.319097, -0.361805, 0.131431), (-0.319097, -0.361805, 0.131431), (-0.376721, -0.328757, 0.0), (-0.415525, 
-0.251149, 0.119427), (-0.376721, -0.328757, 0.0), (-0.447213, -0.223608, 0.0), (-0.415525, -0.251149, 0.119427), (-0.376721, -0.328757, 0.0), (-0.415525, -0.251149, -0.119427), (-0.447213, -0.223608, 0.0), (-0.262865, -0.425326, 0.0), (-0.319097, -0.361805, -0.131432), (-0.376721, -0.328757, 0.0), (-0.125573, -0.483974, 0.0), (-0.180901, -0.447214, -0.131432), (-0.262865, -0.425326, 0.0), (-0.376721, -0.328757, 0.0), (-0.319097, -0.361805, -0.131432), (-0.415525, -0.251149, -0.119427), (-0.319097, -0.361805, -0.131432), (-0.344095, -0.262868, -0.249998), (-0.415525, -0.251149, -0.119427), (-0.262865, -0.425326, 0.0), (-0.180901, -0.447214, -0.131432), (-0.319097, -0.361805, -0.131432), (-0.180901, -0.447214, -0.131432), (-0.223605, -0.361805, -0.262864), (-0.319097, -0.361805, -0.131432), (-0.319097, -0.361805, -0.131432), (-0.223605, -0.361805, -0.262864), (-0.344095, -0.262868, -0.249998), (-0.223605, -0.361805, -0.262864), (-0.241986, -0.251151, -0.358282), (-0.344095, -0.262868, -0.249998), (-0.125573, -0.483974, 0.0), (-0.038803, -0.483975, -0.119426), (-0.180901, -0.447214, -0.131432), (-0.038803, -0.483975, -0.119426), (-0.081228, -0.425327, -0.249998), (-0.180901, -0.447214, -0.131432), (-0.180901, -0.447214, -0.131432), (-0.081228, -0.425327, -0.249998), (-0.223605, -0.361805, -0.262864), (-0.081228, -0.425327, -0.249998), (-0.116411, -0.32876, -0.358282), (-0.223605, -0.361805, -0.262864), (-0.223605, -0.361805, -0.262864), (-0.116411, -0.32876, -0.358282), (-0.241986, -0.251151, -0.358282), (-0.116411, -0.32876, -0.358282), (-0.138194, -0.22361, -0.425325), (-0.241986, -0.251151, -0.358282), (0.406365, -0.25115, 0.147619), (0.304773, -0.328759, 0.221428), (0.361804, -0.22361, 0.262863), (0.425324, -0.262868, 0.0), (0.335409, -0.361805, 0.081228), (0.406365, -0.25115, 0.147619), (0.406365, -0.25115, -0.147619), (0.335409, -0.361805, -0.081229), (0.425324, -0.262868, 0.0), (0.406365, -0.25115, 0.147619), (0.335409, -0.361805, 0.081228), (0.304773, 
-0.328759, 0.221428), (0.335409, -0.361805, 0.081228), (0.212661, -0.425327, 0.154506), (0.304773, -0.328759, 0.221428), (0.425324, -0.262868, 0.0), (0.335409, -0.361805, -0.081229), (0.335409, -0.361805, 0.081228), (0.335409, -0.361805, -0.081229), (0.223605, -0.447214, -0.0), (0.335409, -0.361805, 0.081228), (0.335409, -0.361805, 0.081228), (0.223605, -0.447214, -0.0), (0.212661, -0.425327, 0.154506), (0.223605, -0.447214, -0.0), (0.10159, -0.483975, 0.073809), (0.212661, -0.425327, 0.154506), (0.406365, -0.25115, -0.147619), (0.304773, -0.328759, -0.221428), (0.335409, -0.361805, -0.081229), (0.304773, -0.328759, -0.221428), (0.212661, -0.425327, -0.154506), (0.335409, -0.361805, -0.081229), (0.335409, -0.361805, -0.081229), (0.212661, -0.425327, -0.154506), (0.223605, -0.447214, -0.0), (0.212661, -0.425327, -0.154506), (0.10159, -0.483975, -0.073809), (0.223605, -0.447214, -0.0), (0.223605, -0.447214, -0.0), (0.10159, -0.483975, -0.073809), (0.10159, -0.483975, 0.073809), (0.10159, -0.483975, -0.073809), (-0.0, -0.5, 0.0), (0.10159, -0.483975, 0.073809), (-0.116411, -0.32876, -0.358282), (-0.01482, -0.251151, -0.432092), (-0.138194, -0.22361, -0.425325), (-0.081228, -0.425327, -0.249998), (0.026395, -0.361806, -0.344093), (-0.116411, -0.32876, -0.358282), (-0.038803, -0.483975, -0.119426), (0.069099, -0.447215, -0.21266), (-0.081228, -0.425327, -0.249998), (-0.116411, -0.32876, -0.358282), (0.026395, -0.361806, -0.344093), (-0.01482, -0.251151, -0.432092), (0.026395, -0.361806, -0.344093), (0.131434, -0.262869, -0.404506), (-0.01482, -0.251151, -0.432092), (-0.081228, -0.425327, -0.249998), (0.069099, -0.447215, -0.21266), (0.026395, -0.361806, -0.344093), (0.069099, -0.447215, -0.21266), (0.180902, -0.361805, -0.29389), (0.026395, -0.361806, -0.344093), (0.026395, -0.361806, -0.344093), (0.180902, -0.361805, -0.29389), (0.131434, -0.262869, -0.404506), (0.180902, -0.361805, -0.29389), (0.26597, -0.251151, -0.340856), (0.131434, -0.262869, -0.404506), 
(-0.038803, -0.483975, -0.119426), (0.10159, -0.483975, -0.073809), (0.069099, -0.447215, -0.21266), (0.10159, -0.483975, -0.073809), (0.212661, -0.425327, -0.154506), (0.069099, -0.447215, -0.21266), (0.069099, -0.447215, -0.21266), (0.212661, -0.425327, -0.154506), (0.180902, -0.361805, -0.29389), (0.212661, -0.425327, -0.154506), (0.304773, -0.328759, -0.221428), (0.180902, -0.361805, -0.29389), (0.180902, -0.361805, -0.29389), (0.304773, -0.328759, -0.221428), (0.26597, -0.251151, -0.340856), (0.304773, -0.328759, -0.221428), (0.361804, -0.22361, -0.262863), (0.26597, -0.251151, -0.340856))
# vertices = (
# (0,0,0), (1,1,0), (0,1,0)
# )
# Compute per-vertex normals for the triangle list defined above.
# NOTE(review): `generate_normals` and `vertices` are defined earlier in the
# file (outside this excerpt); presumably ursina's mesh helper — confirm.
norms = generate_normals(vertices)
# print(norms)
# The block below is a kept-for-reference manual test: it visualizes the
# normals as line segments in an ursina scene.
# from ursina import *
# app = Ursina()
# m = Mesh(vertices=vertices)
# m.generate_normals()
# e = Entity(model=m)
# # print(e.normals)
# if e.normals:
# verts = list()
# for i in range(len(e.vertices)):
# verts.append(e.vertices[i])
# verts.append(Vec3(e.vertices[i][0], e.vertices[i][1], e.vertices[i][2])
# + Vec3(e.normals[i][0], e.normals[i][1], e.normals[i][2])*2)
#
# lines=Entity(model=Mesh(verts, mode='line'))
# # e.shader = 'shader_normals'
# EditorCamera()
# app.run()
| 324.638095
| 30,548
| 0.605656
| 6,264
| 34,087
| 3.29007
| 0.039432
| 0.044544
| 0.03712
| 0.010093
| 0.903683
| 0.90213
| 0.90213
| 0.846135
| 0.842545
| 0.788927
| 0
| 0.61623
| 0.113944
| 34,087
| 104
| 30,549
| 327.759615
| 0.066119
| 0.043682
| 0
| 0.04
| 0
| 0
| 0.000246
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.04
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b1a404dbbeecbf269885e0ae34b8e905d6453dac
| 24,913
|
py
|
Python
|
msgraph-cli-extensions/v1_0/mail_v1_0/azext_mail_v1_0/vendored_sdks/mail/models/_mail_enums.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/v1_0/mail_v1_0/azext_mail_v1_0/vendored_sdks/mail/models/_mail_enums.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | 22
|
2022-03-29T22:54:37.000Z
|
2022-03-29T22:55:27.000Z
|
msgraph-cli-extensions/v1_0/mail_v1_0/azext_mail_v1_0/vendored_sdks/mail/models/_mail_enums.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass that makes member lookup case-insensitive.

    Member names in the enums below are upper-case; both subscript access
    (``E["name"]``) and attribute access (``E.name``) upper-case the
    requested name before looking it up.
    """

    def __getitem__(self, name):
        # Upper-case the key so e.g. E["id"] resolves to the ID member.
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`
        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.
        """
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            # Preserve normal attribute-error semantics for unknown names.
            raise AttributeError(name)
class Enum14(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CHILD_FOLDER_COUNT = "childFolderCount"
    DISPLAY_NAME = "displayName"
    PARENT_FOLDER_ID = "parentFolderId"
    TOTAL_ITEM_COUNT = "totalItemCount"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum15(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum16(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CHILD_FOLDER_COUNT = "childFolderCount"
    CHILD_FOLDER_COUNT_DESC = "childFolderCount desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    PARENT_FOLDER_ID = "parentFolderId"
    PARENT_FOLDER_ID_DESC = "parentFolderId desc"
    TOTAL_ITEM_COUNT = "totalItemCount"
    TOTAL_ITEM_COUNT_DESC = "totalItemCount desc"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    UNREAD_ITEM_COUNT_DESC = "unreadItemCount desc"
class Enum17(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CHILD_FOLDER_COUNT = "childFolderCount"
    DISPLAY_NAME = "displayName"
    PARENT_FOLDER_ID = "parentFolderId"
    TOTAL_ITEM_COUNT = "totalItemCount"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum19(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CHILD_FOLDER_COUNT = "childFolderCount"
    DISPLAY_NAME = "displayName"
    PARENT_FOLDER_ID = "parentFolderId"
    TOTAL_ITEM_COUNT = "totalItemCount"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum21(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    ACTIONS = "actions"
    ACTIONS_DESC = "actions desc"
    CONDITIONS = "conditions"
    CONDITIONS_DESC = "conditions desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    EXCEPTIONS = "exceptions"
    EXCEPTIONS_DESC = "exceptions desc"
    HAS_ERROR = "hasError"
    HAS_ERROR_DESC = "hasError desc"
    IS_ENABLED = "isEnabled"
    IS_ENABLED_DESC = "isEnabled desc"
    IS_READ_ONLY = "isReadOnly"
    IS_READ_ONLY_DESC = "isReadOnly desc"
    SEQUENCE = "sequence"
    SEQUENCE_DESC = "sequence desc"
class Enum22(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names (presumably OData $select values — unverified)."""

    ID = "id"
    ACTIONS = "actions"
    CONDITIONS = "conditions"
    DISPLAY_NAME = "displayName"
    EXCEPTIONS = "exceptions"
    HAS_ERROR = "hasError"
    IS_ENABLED = "isEnabled"
    IS_READ_ONLY = "isReadOnly"
    SEQUENCE = "sequence"
class Enum23(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names (presumably OData $select values — unverified)."""

    ID = "id"
    ACTIONS = "actions"
    CONDITIONS = "conditions"
    DISPLAY_NAME = "displayName"
    EXCEPTIONS = "exceptions"
    HAS_ERROR = "hasError"
    IS_ENABLED = "isEnabled"
    IS_READ_ONLY = "isReadOnly"
    SEQUENCE = "sequence"
class Enum24(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CATEGORIES = "categories"
    CATEGORIES_DESC = "categories desc"
    CHANGE_KEY = "changeKey"
    CHANGE_KEY_DESC = "changeKey desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    BCC_RECIPIENTS = "bccRecipients"
    BCC_RECIPIENTS_DESC = "bccRecipients desc"
    BODY = "body"
    BODY_DESC = "body desc"
    BODY_PREVIEW = "bodyPreview"
    BODY_PREVIEW_DESC = "bodyPreview desc"
    CC_RECIPIENTS = "ccRecipients"
    CC_RECIPIENTS_DESC = "ccRecipients desc"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_ID_DESC = "conversationId desc"
    CONVERSATION_INDEX = "conversationIndex"
    CONVERSATION_INDEX_DESC = "conversationIndex desc"
    FLAG = "flag"
    FLAG_DESC = "flag desc"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    FROM_DESC = "from desc"
    HAS_ATTACHMENTS = "hasAttachments"
    HAS_ATTACHMENTS_DESC = "hasAttachments desc"
    IMPORTANCE = "importance"
    IMPORTANCE_DESC = "importance desc"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INFERENCE_CLASSIFICATION_DESC = "inferenceClassification desc"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_HEADERS_DESC = "internetMessageHeaders desc"
    INTERNET_MESSAGE_ID = "internetMessageId"
    INTERNET_MESSAGE_ID_DESC = "internetMessageId desc"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DELIVERY_RECEIPT_REQUESTED_DESC = "isDeliveryReceiptRequested desc"
    IS_DRAFT = "isDraft"
    IS_DRAFT_DESC = "isDraft desc"
    IS_READ = "isRead"
    IS_READ_DESC = "isRead desc"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    IS_READ_RECEIPT_REQUESTED_DESC = "isReadReceiptRequested desc"
    PARENT_FOLDER_ID = "parentFolderId"
    PARENT_FOLDER_ID_DESC = "parentFolderId desc"
    RECEIVED_DATE_TIME = "receivedDateTime"
    RECEIVED_DATE_TIME_DESC = "receivedDateTime desc"
    REPLY_TO = "replyTo"
    REPLY_TO_DESC = "replyTo desc"
    SENDER = "sender"
    SENDER_DESC = "sender desc"
    SENT_DATE_TIME = "sentDateTime"
    SENT_DATE_TIME_DESC = "sentDateTime desc"
    SUBJECT = "subject"
    SUBJECT_DESC = "subject desc"
    TO_RECIPIENTS = "toRecipients"
    TO_RECIPIENTS_DESC = "toRecipients desc"
    UNIQUE_BODY = "uniqueBody"
    UNIQUE_BODY_DESC = "uniqueBody desc"
    WEB_LINK = "webLink"
    WEB_LINK_DESC = "webLink desc"
class Enum25(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CATEGORIES = "categories"
    CHANGE_KEY = "changeKey"
    CREATED_DATE_TIME = "createdDateTime"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    BCC_RECIPIENTS = "bccRecipients"
    BODY = "body"
    BODY_PREVIEW = "bodyPreview"
    CC_RECIPIENTS = "ccRecipients"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_INDEX = "conversationIndex"
    FLAG = "flag"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    HAS_ATTACHMENTS = "hasAttachments"
    IMPORTANCE = "importance"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_ID = "internetMessageId"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DRAFT = "isDraft"
    IS_READ = "isRead"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    PARENT_FOLDER_ID = "parentFolderId"
    RECEIVED_DATE_TIME = "receivedDateTime"
    REPLY_TO = "replyTo"
    SENDER = "sender"
    SENT_DATE_TIME = "sentDateTime"
    SUBJECT = "subject"
    TO_RECIPIENTS = "toRecipients"
    UNIQUE_BODY = "uniqueBody"
    WEB_LINK = "webLink"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum26(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum27(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CATEGORIES = "categories"
    CHANGE_KEY = "changeKey"
    CREATED_DATE_TIME = "createdDateTime"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    BCC_RECIPIENTS = "bccRecipients"
    BODY = "body"
    BODY_PREVIEW = "bodyPreview"
    CC_RECIPIENTS = "ccRecipients"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_INDEX = "conversationIndex"
    FLAG = "flag"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    HAS_ATTACHMENTS = "hasAttachments"
    IMPORTANCE = "importance"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_ID = "internetMessageId"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DRAFT = "isDraft"
    IS_READ = "isRead"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    PARENT_FOLDER_ID = "parentFolderId"
    RECEIVED_DATE_TIME = "receivedDateTime"
    REPLY_TO = "replyTo"
    SENDER = "sender"
    SENT_DATE_TIME = "sentDateTime"
    SUBJECT = "subject"
    TO_RECIPIENTS = "toRecipients"
    UNIQUE_BODY = "uniqueBody"
    WEB_LINK = "webLink"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum28(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum29(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CONTENT_TYPE = "contentType"
    CONTENT_TYPE_DESC = "contentType desc"
    IS_INLINE = "isInline"
    IS_INLINE_DESC = "isInline desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    SIZE = "size"
    SIZE_DESC = "size desc"
class Enum30(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names (presumably OData $select values — unverified)."""

    ID = "id"
    CONTENT_TYPE = "contentType"
    IS_INLINE = "isInline"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    SIZE = "size"
class Enum31(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names (presumably OData $select values — unverified)."""

    ID = "id"
    CONTENT_TYPE = "contentType"
    IS_INLINE = "isInline"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    SIZE = "size"
class Enum32(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id" with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
class Enum33(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum34(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum35(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum36(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum37(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum38(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum39(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum40(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum41(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum42(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum43(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum44(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum45(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CATEGORIES = "categories"
    CATEGORIES_DESC = "categories desc"
    CHANGE_KEY = "changeKey"
    CHANGE_KEY_DESC = "changeKey desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    BCC_RECIPIENTS = "bccRecipients"
    BCC_RECIPIENTS_DESC = "bccRecipients desc"
    BODY = "body"
    BODY_DESC = "body desc"
    BODY_PREVIEW = "bodyPreview"
    BODY_PREVIEW_DESC = "bodyPreview desc"
    CC_RECIPIENTS = "ccRecipients"
    CC_RECIPIENTS_DESC = "ccRecipients desc"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_ID_DESC = "conversationId desc"
    CONVERSATION_INDEX = "conversationIndex"
    CONVERSATION_INDEX_DESC = "conversationIndex desc"
    FLAG = "flag"
    FLAG_DESC = "flag desc"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    FROM_DESC = "from desc"
    HAS_ATTACHMENTS = "hasAttachments"
    HAS_ATTACHMENTS_DESC = "hasAttachments desc"
    IMPORTANCE = "importance"
    IMPORTANCE_DESC = "importance desc"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INFERENCE_CLASSIFICATION_DESC = "inferenceClassification desc"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_HEADERS_DESC = "internetMessageHeaders desc"
    INTERNET_MESSAGE_ID = "internetMessageId"
    INTERNET_MESSAGE_ID_DESC = "internetMessageId desc"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DELIVERY_RECEIPT_REQUESTED_DESC = "isDeliveryReceiptRequested desc"
    IS_DRAFT = "isDraft"
    IS_DRAFT_DESC = "isDraft desc"
    IS_READ = "isRead"
    IS_READ_DESC = "isRead desc"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    IS_READ_RECEIPT_REQUESTED_DESC = "isReadReceiptRequested desc"
    PARENT_FOLDER_ID = "parentFolderId"
    PARENT_FOLDER_ID_DESC = "parentFolderId desc"
    RECEIVED_DATE_TIME = "receivedDateTime"
    RECEIVED_DATE_TIME_DESC = "receivedDateTime desc"
    REPLY_TO = "replyTo"
    REPLY_TO_DESC = "replyTo desc"
    SENDER = "sender"
    SENDER_DESC = "sender desc"
    SENT_DATE_TIME = "sentDateTime"
    SENT_DATE_TIME_DESC = "sentDateTime desc"
    SUBJECT = "subject"
    SUBJECT_DESC = "subject desc"
    TO_RECIPIENTS = "toRecipients"
    TO_RECIPIENTS_DESC = "toRecipients desc"
    UNIQUE_BODY = "uniqueBody"
    UNIQUE_BODY_DESC = "uniqueBody desc"
    WEB_LINK = "webLink"
    WEB_LINK_DESC = "webLink desc"
class Enum46(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CATEGORIES = "categories"
    CHANGE_KEY = "changeKey"
    CREATED_DATE_TIME = "createdDateTime"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    BCC_RECIPIENTS = "bccRecipients"
    BODY = "body"
    BODY_PREVIEW = "bodyPreview"
    CC_RECIPIENTS = "ccRecipients"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_INDEX = "conversationIndex"
    FLAG = "flag"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    HAS_ATTACHMENTS = "hasAttachments"
    IMPORTANCE = "importance"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_ID = "internetMessageId"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DRAFT = "isDraft"
    IS_READ = "isRead"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    PARENT_FOLDER_ID = "parentFolderId"
    RECEIVED_DATE_TIME = "receivedDateTime"
    REPLY_TO = "replyTo"
    SENDER = "sender"
    SENT_DATE_TIME = "sentDateTime"
    SUBJECT = "subject"
    TO_RECIPIENTS = "toRecipients"
    UNIQUE_BODY = "uniqueBody"
    WEB_LINK = "webLink"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum47(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum48(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CATEGORIES = "categories"
    CHANGE_KEY = "changeKey"
    CREATED_DATE_TIME = "createdDateTime"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    BCC_RECIPIENTS = "bccRecipients"
    BODY = "body"
    BODY_PREVIEW = "bodyPreview"
    CC_RECIPIENTS = "ccRecipients"
    CONVERSATION_ID = "conversationId"
    CONVERSATION_INDEX = "conversationIndex"
    FLAG = "flag"
    # "from" is a Python keyword, hence the FROM_ENUM member name.
    FROM_ENUM = "from"
    HAS_ATTACHMENTS = "hasAttachments"
    IMPORTANCE = "importance"
    INFERENCE_CLASSIFICATION = "inferenceClassification"
    INTERNET_MESSAGE_HEADERS = "internetMessageHeaders"
    INTERNET_MESSAGE_ID = "internetMessageId"
    IS_DELIVERY_RECEIPT_REQUESTED = "isDeliveryReceiptRequested"
    IS_DRAFT = "isDraft"
    IS_READ = "isRead"
    IS_READ_RECEIPT_REQUESTED = "isReadReceiptRequested"
    PARENT_FOLDER_ID = "parentFolderId"
    RECEIVED_DATE_TIME = "receivedDateTime"
    REPLY_TO = "replyTo"
    SENDER = "sender"
    SENT_DATE_TIME = "sentDateTime"
    SUBJECT = "subject"
    TO_RECIPIENTS = "toRecipients"
    UNIQUE_BODY = "uniqueBody"
    WEB_LINK = "webLink"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum49(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    ATTACHMENTS = "attachments"
    EXTENSIONS = "extensions"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum5(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names (presumably OData $select values — unverified)."""

    ID = "id"
    CLASSIFY_AS = "classifyAs"
    SENDER_EMAIL_ADDRESS = "senderEmailAddress"
class Enum50(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CONTENT_TYPE = "contentType"
    CONTENT_TYPE_DESC = "contentType desc"
    IS_INLINE = "isInline"
    IS_INLINE_DESC = "isInline desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    SIZE = "size"
    SIZE_DESC = "size desc"
class Enum51(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names (presumably OData $select values — unverified)."""

    ID = "id"
    CONTENT_TYPE = "contentType"
    IS_INLINE = "isInline"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    SIZE = "size"
class Enum52(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attachment property names (presumably OData $select values — unverified)."""

    ID = "id"
    CONTENT_TYPE = "contentType"
    IS_INLINE = "isInline"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    SIZE = "size"
class Enum53(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id" with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
class Enum54(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum55(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum56(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum57(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" with " desc" variants (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    VALUE = "value"
    VALUE_DESC = "value desc"
class Enum58(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum59(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """"id"/"value" property names (presumably OData $select values — unverified)."""

    ID = "id"
    VALUE = "value"
class Enum6(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CHILD_FOLDER_COUNT = "childFolderCount"
    CHILD_FOLDER_COUNT_DESC = "childFolderCount desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    PARENT_FOLDER_ID = "parentFolderId"
    PARENT_FOLDER_ID_DESC = "parentFolderId desc"
    TOTAL_ITEM_COUNT = "totalItemCount"
    TOTAL_ITEM_COUNT_DESC = "totalItemCount desc"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    UNREAD_ITEM_COUNT_DESC = "unreadItemCount desc"
class Enum7(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property and relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    CHILD_FOLDER_COUNT = "childFolderCount"
    DISPLAY_NAME = "displayName"
    PARENT_FOLDER_ID = "parentFolderId"
    TOTAL_ITEM_COUNT = "totalItemCount"
    UNREAD_ITEM_COUNT = "unreadItemCount"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Enum8(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Get1ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property/relationship names (presumably OData $select values — unverified)."""

    ID = "id"
    OVERRIDES = "overrides"
class Get2ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    OVERRIDES = "overrides"
class Get4ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class Get6ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names, each with a " desc" variant (presumably OData $orderby values — unverified)."""

    ID = "id"
    ID_DESC = "id desc"
    CLASSIFY_AS = "classifyAs"
    CLASSIFY_AS_DESC = "classifyAs desc"
    SENDER_EMAIL_ADDRESS = "senderEmailAddress"
    SENDER_EMAIL_ADDRESS_DESC = "senderEmailAddress desc"
class Get7ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Property names (presumably OData $select values — unverified)."""

    ID = "id"
    CLASSIFY_AS = "classifyAs"
    SENDER_EMAIL_ADDRESS = "senderEmailAddress"
class Get9ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Relationship names plus "*" (presumably OData $expand values — unverified)."""

    ASTERISK = "*"
    CHILD_FOLDERS = "childFolders"
    MESSAGE_RULES = "messageRules"
    MESSAGES = "messages"
    MULTI_VALUE_EXTENDED_PROPERTIES = "multiValueExtendedProperties"
    SINGLE_VALUE_EXTENDED_PROPERTIES = "singleValueExtendedProperties"
class MicrosoftGraphBodyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Body content format: plain text or HTML."""

    TEXT = "text"
    HTML = "html"
class MicrosoftGraphFollowupFlagStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Follow-up flag status values."""

    NOT_FLAGGED = "notFlagged"
    COMPLETE = "complete"
    FLAGGED = "flagged"
class MicrosoftGraphImportance(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Importance level values."""

    LOW = "low"
    NORMAL = "normal"
    HIGH = "high"
class MicrosoftGraphInferenceClassificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Inference classification values ("focused" / "other")."""

    FOCUSED = "focused"
    OTHER = "other"
class MicrosoftGraphMessageActionFlag(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Message action flag values."""

    ANY = "any"
    CALL = "call"
    DO_NOT_FORWARD = "doNotForward"
    FOLLOW_UP = "followUp"
    FYI = "fyi"
    FORWARD = "forward"
    NO_RESPONSE_NECESSARY = "noResponseNecessary"
    READ = "read"
    REPLY = "reply"
    REPLY_TO_ALL = "replyToAll"
    REVIEW = "review"
class MicrosoftGraphSensitivity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Sensitivity level values."""

    NORMAL = "normal"
    PERSONAL = "personal"
    PRIVATE = "private"
    CONFIDENTIAL = "confidential"
| 32.737188
| 101
| 0.724
| 2,418
| 24,913
| 7.124069
| 0.117866
| 0.046035
| 0.125392
| 0.135841
| 0.880413
| 0.865436
| 0.862475
| 0.859747
| 0.859747
| 0.856496
| 0
| 0.004867
| 0.183559
| 24,913
| 760
| 102
| 32.780263
| 0.842035
| 0.028901
| 0
| 0.836276
| 0
| 0
| 0.275269
| 0.0686
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00321
| false
| 0
| 0.017657
| 0.001605
| 1.009631
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
491b84a620d34ef5280874f413a686f1ddf5f2d7
| 40,232
|
py
|
Python
|
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyNumber/autorestnumbertestservice/operations/number_operations.py
|
fhoering/autorest
|
b36c77ebb6a5c92aca72eea0894a683506af5817
|
[
"MIT"
] | null | null | null |
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyNumber/autorestnumbertestservice/operations/number_operations.py
|
fhoering/autorest
|
b36c77ebb6a5c92aca72eea0894a683506af5817
|
[
"MIT"
] | null | null | null |
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyNumber/autorestnumbertestservice/operations/number_operations.py
|
fhoering/autorest
|
b36c77ebb6a5c92aca72eea0894a683506af5817
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class NumberOperations(object):
"""NumberOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null Number value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: float
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
"""
# Construct URL
url = '/number/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('float', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid_float(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid float Number value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: float
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
"""
# Construct URL
url = '/number/invalidfloat'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('float', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid_double(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid double Number value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: float
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
"""
# Construct URL
url = '/number/invaliddouble'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('float', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid_decimal(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid decimal Number value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: decimal
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
"""
# Construct URL
url = '/number/invaliddecimal'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('decimal', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_big_float(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big float value 3.402823e+20.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/float/3.402823e+20'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_float(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big float value 3.402823e+20.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/float/3.402823e+20'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('float', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_double(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big double value 2.5976931e+101.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/2.5976931e+101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_double(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big double value 2.5976931e+101.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/2.5976931e+101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('float', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_double_positive_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big double value 99999999.99.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_double_positive_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big double value 99999999.99.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('float', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_double_negative_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big double value -99999999.99.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/-99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_double_negative_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big double value -99999999.99.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/double/-99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('float', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big decimal value 2.5976931e+101.

    :param number_body:
    :type number_body: decimal
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/2.5976931e+101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'decimal')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big decimal value 2.5976931e+101.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: decimal
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/2.5976931e+101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('decimal', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_decimal_positive_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big decimal value 99999999.99.

    :param number_body:
    :type number_body: decimal
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'decimal')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_decimal_positive_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big decimal value 99999999.99.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: decimal
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('decimal', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_big_decimal_negative_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put big decimal value -99999999.99.

    :param number_body:
    :type number_body: decimal
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/-99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'decimal')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_big_decimal_negative_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get big decimal value -99999999.99.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: decimal
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/big/decimal/-99999999.99'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('decimal', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_small_float(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put small float value 3.402823e-20.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/small/float/3.402823e-20'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_small_float(
        self, custom_headers=None, raw=False, **operation_config):
    """Get small float value 3.402823e-20.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Construct URL (no query parameters for this endpoint)
    url = '/number/small/float/3.402823e-20'
    query_parameters = {}

    # Construct headers: JSON content type plus any caller-supplied extras
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('float', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_small_double(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put small double value 2.5976931e-101.

    :param number_body:
    :type number_body: float
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/small/double/2.5976931e-101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'float')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_small_double(
        self, custom_headers=None, raw=False, **operation_config):
    """Get small double value 2.5976931e-101.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: float
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Construct URL (no query parameters for this endpoint)
    url = '/number/small/double/2.5976931e-101'
    query_parameters = {}

    # Construct headers: JSON content type plus any caller-supplied extras
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('float', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def put_small_decimal(
        self, number_body, custom_headers=None, raw=False, **operation_config):
    """Put small decimal value 2.5976931e-101.

    :param number_body:
    :type number_body: decimal
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/small/decimal/2.5976931e-101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    # Serialize the payload and issue the PUT.
    body_content = self._serialize.body(number_body, 'decimal')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def get_small_decimal(
        self, custom_headers=None, raw=False, **operation_config):
    """Get small decimal value 2.5976931e-101.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: decimal
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<Fixtures.AcceptanceTestsBodyNumber.models.ErrorException>`
    """
    # Endpoint takes no query parameters; headers only carry the content type.
    url = '/number/small/decimal/2.5976931e-101'
    query_parameters = {}
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code != 200:
        raise models.ErrorException(self._deserialize, response)

    deserialized = (self._deserialize('decimal', response)
                    if response.status_code == 200 else None)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
| 36.475068
| 90
| 0.653137
| 4,035
| 40,232
| 6.345725
| 0.033209
| 0.04874
| 0.043117
| 0.029682
| 0.972193
| 0.972193
| 0.972193
| 0.971802
| 0.971802
| 0.969967
| 0
| 0.018431
| 0.262328
| 40,232
| 1,102
| 91
| 36.508167
| 0.844329
| 0.362498
| 0
| 0.918919
| 0
| 0
| 0.081984
| 0.029926
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056306
| false
| 0
| 0.004505
| 0
| 0.148649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4933ad1ce8bb461bbc6e954df40a7372943a5c18
| 2,950
|
py
|
Python
|
todo/views.py
|
masashimorita/django-todo-api
|
ab60904eac5cbb777982156cc9f99b333e1c2946
|
[
"MIT"
] | null | null | null |
todo/views.py
|
masashimorita/django-todo-api
|
ab60904eac5cbb777982156cc9f99b333e1c2946
|
[
"MIT"
] | null | null | null |
todo/views.py
|
masashimorita/django-todo-api
|
ab60904eac5cbb777982156cc9f99b333e1c2946
|
[
"MIT"
] | null | null | null |
from rest_framework import generics
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from core.models import TodoList, TodoTask
from todo import serializers
class TodoListView(generics.ListCreateAPIView):
    """List the authenticated user's TodoLists and create new ones."""
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    queryset = TodoList.objects.all()
    serializer_class = serializers.TodoListSerializer

    def get_queryset(self):
        """Limit results to lists owned by the requesting user, in reverse name order."""
        owned = self.queryset.filter(user=self.request.user)
        return owned.order_by('-name')

    def perform_create(self, serializer):
        """Record the requesting user as the owner of the created TodoList."""
        serializer.save(user=self.request.user)
class TodoListDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single TodoList."""
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    queryset = TodoList.objects.all()
    serializer_class = serializers.TodoListSerializer

    def get_queryset(self):
        """Limit lookups to lists owned by the requesting user, in reverse name order."""
        owned = self.queryset.filter(user=self.request.user)
        return owned.order_by('-name')
class TodoTaskListView(generics.ListCreateAPIView):
    """List and create TodoTasks belonging to one TodoList."""
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    queryset = TodoTask.objects.all()
    serializer_class = serializers.TodoTaskSerializer

    def todo_list_pk(self):
        """Read the parent TodoList primary key out of the URL kwargs."""
        url_kwargs = self.request.parser_context.get('kwargs')
        return url_kwargs['todo_list_pk']

    def get_queryset(self):
        """Limit results to the requesting user's tasks in this list, ordered by priority."""
        return self.queryset.filter(
            user=self.request.user,
            todo_list_id=self.todo_list_pk()
        ).order_by('priority')

    def perform_create(self, serializer):
        """Create a task owned by the requesting user inside the parent TodoList."""
        # Looking up the list with user= ensures a caller cannot attach
        # a task to someone else's TodoList.
        parent_list = TodoList.objects.get(
            id=self.todo_list_pk(),
            user=self.request.user
        )
        serializer.save(user=self.request.user, todo_list=parent_list)
class TodoTaskDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single TodoTask within a TodoList."""
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    queryset = TodoTask.objects.all()
    serializer_class = serializers.TodoTaskSerializer

    def todo_list_pk(self):
        """Read the parent TodoList primary key out of the URL kwargs."""
        kwargs = self.request.parser_context.get('kwargs')
        return kwargs['todo_list_pk']

    def get_queryset(self):
        """Limit lookups to the requesting user's tasks in this list, ordered by priority."""
        return self.queryset.filter(
            user=self.request.user,
            todo_list_id=self.todo_list_pk()
        ).order_by('priority')
| 35.97561
| 77
| 0.70678
| 315
| 2,950
| 6.466667
| 0.190476
| 0.051055
| 0.051546
| 0.065292
| 0.782032
| 0.74325
| 0.706922
| 0.706922
| 0.667649
| 0.667649
| 0
| 0
| 0.195254
| 2,950
| 81
| 78
| 36.419753
| 0.858045
| 0.147119
| 0
| 0.716981
| 0
| 0
| 0.025255
| 0
| 0
| 0
| 0
| 0.012346
| 0
| 1
| 0.150943
| false
| 0
| 0.09434
| 0
| 0.735849
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
4951302e622b419b9ff234eb4c043722062a086e
| 126,222
|
py
|
Python
|
tests/test_trial.py
|
ecs-vlc/torchbearer
|
9d97c60ec4deb37a0627311ddecb9c6f1429cd82
|
[
"MIT"
] | 358
|
2018-07-23T13:30:38.000Z
|
2019-06-02T07:18:35.000Z
|
tests/test_trial.py
|
jonhare/torchbearer
|
ea5f2d57bccb35c493e9c18992bc6fc645a5b2f0
|
[
"MIT"
] | 307
|
2018-07-18T12:07:23.000Z
|
2019-06-03T18:00:27.000Z
|
tests/test_trial.py
|
jonhare/torchbearer
|
ea5f2d57bccb35c493e9c18992bc6fc645a5b2f0
|
[
"MIT"
] | 42
|
2018-07-23T22:49:23.000Z
|
2019-05-20T07:22:55.000Z
|
from unittest import TestCase
from mock import MagicMock, Mock, patch, ANY, create_autospec
import torch
from torch.utils.data import DataLoader
import torchbearer
import torchbearer.callbacks as callbacks
from torchbearer import Trial, State
from torchbearer.metrics import Metric
from torchbearer.trial import deep_to, load_batch_none, load_batch_predict, load_batch_standard, load_batch_infinite, update_device_and_dtype, CallbackListInjection
class _StateMaker(object):
    """Builds a ``State`` via slice syntax: ``make_state[KEY_A: val_a, KEY_B: val_b]``."""

    def __getitem__(self, keys):
        # A single subscript arrives as one slice, not a tuple; normalize.
        if not isinstance(keys, tuple):
            keys = (keys,)
        assert all(isinstance(key, slice) for key in keys)
        state = State()
        for entry in keys:
            # slice.start is the state key, slice.stop is its value
            state[entry.start] = entry.stop
        return state


make_state = _StateMaker()
class TestMockOptimizer(TestCase):
    @patch('torchbearer.trial.Optimizer')
    def test_mock_optimizer(self, mock_opt):
        # Stub each Optimizer method so we can assert none of them are
        # ever delegated to by MockOptimizer.
        for name in ('add_param_group', 'load_state_dict', 'state_dict',
                     'step', 'zero_grad'):
            setattr(mock_opt, name, Mock())

        opt = torchbearer.trial.MockOptimizer()

        self.assertIsNone(opt.add_param_group({}))
        mock_opt.add_param_group.assert_not_called()

        self.assertIsNone(opt.load_state_dict({}))
        mock_opt.load_state_dict.assert_not_called()

        self.assertDictEqual(opt.state_dict(), {})
        mock_opt.state_dict.assert_not_called()

        self.assertIsNone(opt.step())
        mock_opt.step.assert_not_called()

        self.assertIsNone(opt.zero_grad())
        mock_opt.zero_grad.assert_not_called()

    def test_mock_optimizer_closure(self):
        trial = Trial(None)
        closure = Mock()

        # A Trial built without an optimizer falls back to MockOptimizer,
        # whose step() must still invoke the closure exactly once.
        opt = trial.state[torchbearer.OPTIMIZER]
        opt.step(closure)
        self.assertTrue(closure.call_count == 1)
class TestCallbackListInjection(TestCase):
    """Checks that CallbackListInjection forwards to the wrapped list and runs
    the injected callback before the list's own callbacks."""

    def test_pass_through(self):
        wrapped = MagicMock()
        injection = CallbackListInjection(None, wrapped)

        # state_dict is delegated to the wrapped list
        wrapped.state_dict.return_value = 'test'
        self.assertEqual(injection.state_dict(), 'test')
        self.assertEqual(wrapped.state_dict.call_count, 1)

        # load_state_dict is delegated
        injection.load_state_dict('test')
        wrapped.load_state_dict.assert_called_once_with('test')

        # iteration is delegated
        wrapped.__iter__.return_value = ['iterator']
        self.assertEqual(next(injection.__iter__()), 'iterator')
        self.assertEqual(wrapped.__iter__.call_count, 1)

        # copy is delegated
        wrapped.copy.return_value = 'copy'
        self.assertEqual(injection.copy(), 'copy')

        # append is delegated
        injection.append('stuff to append')
        wrapped.append.assert_called_once_with('stuff to append')

    def test_order(self):
        shared = {'my_number': 10}

        @callbacks.on_start
        def set_one(state):
            shared['my_number'] = 1
        set_one.on_end = Mock()

        @callbacks.on_start
        def set_two(state):
            shared['my_number'] = 2
        set_two.on_end = Mock()

        injection = CallbackListInjection(set_one, callbacks.CallbackList([set_two]))

        # Both the injected callback and the list member receive on_end.
        injection.on_end({})
        self.assertEqual(set_one.on_end.call_count, 1)
        self.assertEqual(set_two.on_end.call_count, 1)

        # The injected callback fires first, so the list's write wins.
        injection.on_start({})
        self.assertEqual(shared['my_number'], 2)
class TestWithGenerators(TestCase):
    """Tests for Trial.with_*_generator, for_*_steps and the infinite-steps helpers.

    Every test needs the same fixture (a mocked model/optimizer and a mock
    generator reporting a length of 2), so it is built once in ``_make_trial``
    rather than repeated in each method. ``assertEqual`` is used instead of
    ``assertTrue(a == b)`` so failures report actual vs. expected values.
    """

    @staticmethod
    def _make_trial():
        """Return ``(trial, generator)``: a Trial with mocked model/optimizer
        and a mock generator whose ``len()`` is 2."""
        torchmodel = MagicMock()
        torchmodel.forward = Mock(return_value=1)
        generator = MagicMock()
        generator.__len__.return_value = 2
        trial = Trial(torchmodel, MagicMock(), None, [Metric('test')])
        return trial, generator

    def test_with_train_generator_state_filled(self):
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, 1)
        self.assertEqual(trial.state[torchbearer.TRAIN_GENERATOR], generator)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], 1)

    @patch('warnings.warn')
    def test_with_train_generator_too_many_steps(self, _):
        # More steps than the generator holds is allowed (warning suppressed).
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, 10)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], 10)

    @patch('warnings.warn')
    def test_with_train_generator_inf_steps(self, _):
        # -1 means "iterate forever".
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, -1)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], -1)

    @patch('warnings.warn')
    def test_with_train_generator_fractional_steps(self, _):
        # Fractional step counts are truncated to ints.
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, 1.5)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], 1)

    @patch('warnings.warn')
    def test_with_train_generator_negative_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, -2)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], -2)

    @patch('warnings.warn')
    def test_with_train_generator_none_steps(self, _):
        # steps=None falls back to len(generator).
        trial, generator = self._make_trial()
        trial.with_train_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], 2)

    @patch('warnings.warn')
    def test_with_train_generator_old_steps(self, _):
        # steps=None keeps a count previously set via for_train_steps.
        trial, generator = self._make_trial()
        trial.for_train_steps(100)
        trial.with_train_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], 100)

    def test_with_val_generator_state_filled(self):
        trial, generator = self._make_trial()
        trial.with_val_generator(generator, 1)
        self.assertEqual(trial.state[torchbearer.VALIDATION_GENERATOR], generator)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], 1)

    @patch('warnings.warn')
    def test_with_val_generator_too_many_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_val_generator(generator, 10)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], 10)

    @patch('warnings.warn')
    def test_with_val_generator_fractional_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_val_generator(generator, 1.5)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], 1)

    @patch('warnings.warn')
    def test_with_val_generator_negative_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_val_generator(generator, -2)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], -2)

    @patch('warnings.warn')
    def test_with_val_generator_none_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_val_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], 2)

    @patch('warnings.warn')
    def test_with_val_generator_old_steps(self, _):
        trial, generator = self._make_trial()
        trial.for_val_steps(100)
        trial.with_val_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], 100)

    def test_with_test_generator_state_filled(self):
        trial, generator = self._make_trial()
        trial.with_test_generator(generator, 1)
        self.assertEqual(trial.state[torchbearer.TEST_GENERATOR], generator)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], 1)

    @patch('warnings.warn')
    def test_with_test_generator_too_many_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_test_generator(generator, 10)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], 10)

    @patch('warnings.warn')
    def test_with_test_generator_fractional_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_test_generator(generator, 1.5)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], 1)

    @patch('warnings.warn')
    def test_with_test_generator_negative_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_test_generator(generator, -2)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], -2)

    @patch('warnings.warn')
    def test_with_test_generator_none_steps(self, _):
        trial, generator = self._make_trial()
        trial.with_test_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], 2)

    @patch('warnings.warn')
    def test_with_test_generator_old_steps(self, _):
        trial, generator = self._make_trial()
        trial.for_test_steps(100)
        trial.with_test_generator(generator, None)
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], 100)

    @patch('warnings.warn')
    def test_for_steps(self, _):
        # for_steps should fan out to the three per-split step setters.
        trial, _generator = self._make_trial()
        train_steps, val_steps, test_steps = 1, 2, 3
        trainstep = trial.for_train_steps = MagicMock()
        valstep = trial.for_val_steps = MagicMock()
        teststep = trial.for_test_steps = MagicMock()
        trial.for_steps(train_steps, val_steps, test_steps)
        trainstep.assert_called_once_with(train_steps)
        valstep.assert_called_once_with(val_steps)
        teststep.assert_called_once_with(test_steps)

    @patch('warnings.warn')
    def test_with_generators(self, _):
        # with_generators should fan out to the three per-split generator setters.
        trial, _generator = self._make_trial()
        train_generator, val_generator, test_generator = MagicMock(), MagicMock(), MagicMock()
        train_generator.__len__.return_value = 2
        val_generator.__len__.return_value = 3
        test_generator.__len__.return_value = 4
        train_steps, val_steps, test_steps = 1, 2, 3
        traingen = trial.with_train_generator = MagicMock()
        valgen = trial.with_val_generator = MagicMock()
        testgen = trial.with_test_generator = MagicMock()
        trial.with_generators(train_generator, val_generator, test_generator, train_steps, val_steps, test_steps)
        traingen.assert_called_once_with(train_generator, train_steps)
        valgen.assert_called_once_with(val_generator, val_steps)
        testgen.assert_called_once_with(test_generator, test_steps)

    @patch('warnings.warn')
    def test_for_inf_train_steps(self, _):
        trial, _generator = self._make_trial()
        trial.for_inf_train_steps()
        self.assertEqual(trial.state[torchbearer.TRAIN_STEPS], -1)

    @patch('warnings.warn')
    def test_for_inf_val_steps(self, _):
        trial, _generator = self._make_trial()
        trial.for_inf_val_steps()
        self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS], -1)

    @patch('warnings.warn')
    def test_for_inf_test_steps(self, _):
        trial, _generator = self._make_trial()
        trial.for_inf_test_steps()
        self.assertEqual(trial.state[torchbearer.TEST_STEPS], -1)

    @patch('warnings.warn')
    def test_for_inf_steps(self, _):
        # Each flag should independently switch its split to infinite (-1) steps.
        cases = [(True, True, True), (True, False, True), (True, False, False), (False, False, False)]
        for train, val, test in cases:
            trial, _generator = self._make_trial()
            trial.for_inf_steps(train, val, test)
            self.assertEqual(trial.state[torchbearer.TRAIN_STEPS] == -1, train)
            self.assertEqual(trial.state[torchbearer.VALIDATION_STEPS] == -1, val)
            self.assertEqual(trial.state[torchbearer.TEST_STEPS] == -1, test)

    @patch('warnings.warn')
    def test_with_inf_train_loader(self, _):
        trial, _generator = self._make_trial()
        trial.with_inf_train_loader()
        self.assertTrue(trial.state[torchbearer.INF_TRAIN_LOADING])
class TestWithData(TestCase):
    """Tests that with_*_data wraps raw tensors in a TensorDataset + DataLoader
    pair and forwards the loader to the matching with_*_generator method.

    The original fixtures built a mock generator and a mocked ``forward`` that
    were never used; they have been removed.
    """

    @patch('torchbearer.trial.TensorDataset')
    @patch('torchbearer.trial.DataLoader')
    def test_with_train_data(self, d, td):
        x, y = torch.rand(1, 5), torch.rand(1, 5)
        d.return_value = -1  # sentinel "loader", forwarded to with_train_generator
        trial = Trial(MagicMock(), MagicMock(), None, [Metric('test')])
        trial.with_train_generator = MagicMock()
        trial.with_train_data(x, y, 1, shuffle=False, num_workers=1, steps=4)
        d.assert_called_once_with(ANY, 1, shuffle=False, num_workers=1)
        trial.with_train_generator.assert_called_once_with(-1, steps=4)
        td.assert_called_once_with(x, y)

    @patch('torchbearer.trial.TensorDataset')
    @patch('torchbearer.trial.DataLoader')
    def test_with_val_data(self, d, td):
        x, y = torch.rand(1, 5), torch.rand(1, 5)
        d.return_value = -1
        trial = Trial(MagicMock(), MagicMock(), None, [Metric('test')])
        trial.with_val_generator = MagicMock()
        trial.with_val_data(x, y, 1, shuffle=False, num_workers=1, steps=4)
        d.assert_called_once_with(ANY, 1, shuffle=False, num_workers=1)
        trial.with_val_generator.assert_called_once_with(-1, steps=4)
        td.assert_called_once_with(x, y)

    @patch('torchbearer.trial.TensorDataset')
    @patch('torchbearer.trial.DataLoader')
    def test_with_test_data(self, d, td):
        # Test data has no targets, so only x is wrapped and no shuffle is passed.
        x = torch.rand(1, 5)
        d.return_value = -1
        trial = Trial(MagicMock(), MagicMock(), None, [Metric('test')])
        trial.with_test_generator = MagicMock()
        trial.with_test_data(x, 1, num_workers=1, steps=4)
        d.assert_called_once_with(ANY, 1, num_workers=1)
        trial.with_test_generator.assert_called_once_with(-1, steps=4)
        td.assert_called_once_with(x)

    def test_with_data(self):
        # with_data should fan out to the three per-split data setters.
        trial = Trial(None)
        mock_train_data, mock_val_data, mock_test_data = Mock(), Mock(), Mock()
        trial.with_train_data = mock_train_data
        trial.with_val_data = mock_val_data
        trial.with_test_data = mock_test_data
        one_tensor = torch.Tensor([1])
        target_tensor = torch.Tensor([10])
        trial.with_data(one_tensor, target_tensor, one_tensor * 2, target_tensor * 2, one_tensor * 3, 30,
                        train_steps=100, val_steps=200, test_steps=300, shuffle=True)
        # assertTrue is kept here: tuple comparison relies on tensor element-wise ==.
        self.assertTrue(mock_train_data.call_args[0] == (one_tensor, target_tensor, 30, True, 1, 100))
        self.assertTrue(mock_val_data.call_args[0] == (one_tensor * 2, target_tensor * 2, 30, True, 1, 200))
        self.assertTrue(mock_test_data.call_args[0] == (one_tensor * 3, 30, 1, 300))
class TestWithClosureAndLoader(TestCase):
    """Tests for attaching a custom optimizer closure / batch loader to a Trial."""

    def test_with_closure(self):
        trial = Trial(None)
        trial.with_closure(lambda: 'test')
        # The stored closure is invoked through trial.closure().
        self.assertTrue(trial.closure() == 'test')

    def test_with_loader(self):
        def custom_loader(state):
            print('test')

        trial = Trial(None)
        trial.with_loader(custom_loader)
        # The loader is stored verbatim in the trial state.
        self.assertTrue(trial.state[torchbearer.LOADER] == custom_loader)
class TestRun(TestCase):
    """Tests for Trial.run: callback hook counts, epoch counting and history.

    All tests share a 15-line fixture (3-batch DataLoader, mocked metric,
    mocked ``_fit_pass`` / ``_validation_pass``); it is built once in
    ``_make_trial``. ``assertEqual`` replaces ``assertTrue(a == b)`` so a
    failure shows the actual count.
    """

    @staticmethod
    def _make_trial(cb_list):
        """Return ``(trial, train_steps)`` over a 3-batch DataLoader with the
        per-epoch passes mocked out so run() only exercises the epoch loop."""
        metric = Metric('test')
        metric.process = Mock(return_value={'test': 0})
        metric.process_final = Mock(return_value={'test': 0})
        metric.reset = Mock(return_value=None)
        data = [(torch.Tensor([1]), torch.Tensor([1])),
                (torch.Tensor([2]), torch.Tensor([2])),
                (torch.Tensor([3]), torch.Tensor([3]))]
        generator = DataLoader(data)
        torchmodel = MagicMock()
        torchmodel.forward = Mock(return_value=1)
        criterion = Mock(return_value=torch.tensor([2.0], requires_grad=True))
        trial = Trial(torchmodel, MagicMock(), criterion, [metric], callbacks=cb_list)
        trial._fit_pass = Mock(return_value={torchbearer.METRICS: {}})
        trial._validation_pass = Mock(return_value={torchbearer.METRICS: {}})
        trial.with_train_generator(generator, steps=len(data))
        return trial, len(data)

    def test_run_callback_calls(self):
        callback = MagicMock()
        trial, _steps = self._make_trial([callback])
        trial.run(epochs=1, verbose=0)
        self.assertEqual(callback.on_start.call_count, 1)
        self.assertEqual(callback.on_start_epoch.call_count, 1)
        self.assertEqual(callback.on_end_epoch.call_count, 1)
        self.assertEqual(callback.on_end.call_count, 1)

    def test_run_epochs_ran_normal(self):
        epochs = 4
        trial, _steps = self._make_trial([MagicMock()])
        trial.run(epochs=epochs, verbose=0)
        self.assertEqual(trial._fit_pass.call_count, epochs)

    def test_run_epochs_ran_negative(self):
        # A negative epoch count runs no fit passes at all.
        trial, _steps = self._make_trial([MagicMock()])
        trial.run(epochs=-1, verbose=0)
        self.assertEqual(trial._fit_pass.call_count, 0)

    def test_run_epochs_history_populated(self):
        # Five epochs already recorded in the history count towards the total.
        trial, _steps = self._make_trial([MagicMock()])
        trial.state[torchbearer.HISTORY] = [1, 2, 3, 4, 5]
        trial.run(epochs=10, verbose=0)
        self.assertEqual(trial._fit_pass.call_count, 5)

    @patch('warnings.warn')
    def test_run_fit_pass_Args(self, _):
        # run() works even when the "model" is not a module at all, because
        # the fit pass itself is mocked out.
        data = [(torch.Tensor([1]), torch.Tensor([1])),
                (torch.Tensor([2]), torch.Tensor([2])),
                (torch.Tensor([3]), torch.Tensor([3]))]
        generator = DataLoader(data)
        trial = Trial(1, None, None, [], callbacks=[])
        trial._fit_pass = Mock(return_value={torchbearer.METRICS: {}})
        trial._validation_pass = Mock(return_value={torchbearer.METRICS: {}})
        trial.with_train_generator(generator, steps=len(data))
        trial.run(epochs=1, verbose=0)
        self.assertEqual(trial._fit_pass.call_count, 1)

    def test_run_stop_training(self):
        callback = MagicMock()
        trial, _steps = self._make_trial([callback])
        trial.state[torchbearer.STOP_TRAINING] = True
        trial.run(epochs=10, verbose=0)
        # The first epoch starts, sees the flag and bails before on_end_epoch.
        self.assertEqual(callback.on_start_epoch.call_count, 1)
        self.assertEqual(callback.on_end_epoch.call_count, 0)
        self.assertEqual(callback.on_end.call_count, 1)

    def test_run_stop_training_second(self):
        callback = MagicMock()

        @torchbearer.callbacks.on_end_epoch
        def stop_callback(state):
            state[torchbearer.STOP_TRAINING] = True

        # stop_callback runs first, so the epoch still completes once.
        trial, _steps = self._make_trial([stop_callback, callback])
        trial.run(epochs=10, verbose=0)
        self.assertEqual(callback.on_start_epoch.call_count, 1)
        self.assertEqual(callback.on_end_epoch.call_count, 1)
        self.assertEqual(callback.on_end.call_count, 1)

    def test_run_history_metrics(self):
        # The history entry merges step counts with both passes' metrics.
        trial, train_steps = self._make_trial([MagicMock()])
        trial._fit_pass = Mock(return_value={torchbearer.METRICS: {'fit_test': 1}})
        trial._validation_pass = Mock(return_value={'val_test': 2})
        history = trial.run(epochs=1, verbose=0)
        self.assertDictEqual(history[0], {'train_steps': train_steps, 'validation_steps': None,
                                          'fit_test': 1, 'val_test': 2})
class TestFitPass(TestCase):
    @patch('torchbearer.CallbackListInjection')
    def test_fit_train_called(self, mock_inj):
        """_fit_pass should switch the trial into train mode exactly once."""
        # Three (x, y) batches so the pass has data to iterate.
        data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
        generator = DataLoader(data)
        train_steps = len(data)
        epochs = 1
        torchmodel = MagicMock()
        optimizer = MagicMock()
        # Loss requires grad so a backward() inside the pass would succeed.
        loss = torch.tensor([2.0], requires_grad=True)
        criterion = Mock(return_value=loss)
        metric_list = MagicMock()
        callback_list = MagicMock()
        # CallbackListInjection is patched to hand back our mock list.
        mock_inj.return_value = callback_list
        # Hand-rolled State (via make_state slice syntax) holding every key
        # the fit pass reads.
        state = make_state[
            torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion, torchbearer.OPTIMIZER: optimizer,
            torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float,
            torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0, torchbearer.INF_TRAIN_LOADING: False,
            torchbearer.BACKWARD_ARGS: {},
        ]
        torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
        torchbearertrial.train = Mock()
        # Replace the trial's own state with just the keys _fit_pass consults.
        torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                                  torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
        torchbearertrial._fit_pass(state)
        self.assertEqual(torchbearertrial.train.call_count, 1)
    @patch('torchbearer.CallbackListInjection')
    def test_fit_metrics_reset(self, mock_inj):
        """_fit_pass should reset the metric list exactly once per pass."""
        data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
        generator = DataLoader(data)
        train_steps = len(data)
        epochs = 1
        torchmodel = MagicMock()
        optimizer = MagicMock()
        loss = torch.tensor([2.0], requires_grad=True)
        criterion = Mock(return_value=loss)
        metric_list = MagicMock()
        callback_list = MagicMock()
        mock_inj.return_value = callback_list
        # Hand-rolled State holding every key the fit pass reads.
        state = make_state[
            torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion, torchbearer.OPTIMIZER: optimizer,
            torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float,
            torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0, torchbearer.INF_TRAIN_LOADING: False,
            torchbearer.BACKWARD_ARGS: {},
        ]
        torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
        torchbearertrial.train = Mock()
        torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                                  torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
        torchbearertrial._fit_pass(state)
        self.assertEqual(metric_list.reset.call_count, 1)
    @patch('torchbearer.CallbackListInjection')
    def test_fit_callback_calls(self, mock_inj):
        """Each per-batch callback hook fires once per batch; the start/end
        training hooks fire once per pass."""
        data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
        generator = DataLoader(data)
        train_steps = len(data)
        epochs = 1
        torchmodel = MagicMock()
        optimizer = MagicMock()
        # step() must actually invoke the closure so the forward/backward path
        # (and hence the per-batch hooks) runs.
        optimizer.step = lambda closure: closure()
        loss = torch.tensor([2.0], requires_grad=True)
        criterion = Mock(return_value=loss)
        metric_list = MagicMock()
        callback_list = MagicMock()
        mock_inj.return_value = callback_list
        # Hand-rolled State holding every key the fit pass reads.
        state = make_state[
            torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion, torchbearer.OPTIMIZER: optimizer,
            torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float,
            torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0, torchbearer.INF_TRAIN_LOADING: False,
            torchbearer.BACKWARD_ARGS: {}
        ]
        torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
        torchbearertrial.train = Mock()
        torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                                  torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
        torchbearertrial._fit_pass(state)
        self.assertEqual(callback_list.on_start_training.call_count, 1)
        # One call per batch (3 batches).
        self.assertTrue(callback_list.on_sample.call_count == 3)
        self.assertTrue(callback_list.on_forward.call_count == 3)
        self.assertTrue(callback_list.on_criterion.call_count == 3)
        self.assertTrue(callback_list.on_backward.call_count == 3)
        self.assertTrue(callback_list.on_step_training.call_count == 3)
        self.assertEqual(callback_list.on_end_training.call_count, 1)
@patch('torchbearer.CallbackListInjection')
def test_fit_optimizer_calls(self, mock_inj):
    """The optimizer should be zeroed and stepped once per batch (3 batches)."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    optimizer = MagicMock()
    # Wrap in a Mock so call_count is recorded while the closure still runs.
    optimizer.step = Mock(side_effect=lambda closure: closure())
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion, torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0, torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(optimizer.zero_grad.call_count, 3)
    self.assertEqual(optimizer.step.call_count, 3)
@patch('torchbearer.CallbackListInjection')
def test_fit_forward_call_no_state(self, mock_inj):
    """With pass_state False the model is called once per batch with only the input tensor."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion, torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0, torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(torchmodel.call_count, 3)
    # First forward call received the first batch's input value.
    self.assertEqual(torchmodel.call_args_list[0][0][0].item(), 1)
@patch('torchbearer.CallbackListInjection')
def test_fit_forward_call_with_state(self, mock_inj):
    """With pass_state True the model forward receives one keyword argument (the state)."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(torchmodel.call_count, 3)
    # call_args_list[0][1] is the kwargs dict of the first forward call.
    self.assertEqual(len(torchmodel.call_args_list[0][1]), 1)
@patch('torchbearer.CallbackListInjection')
def test_fit_criterion(self, mock_inj):
    """The (y_pred, y_true) criterion is called per batch with the model output and target."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    loss = torch.tensor([2.0], requires_grad=True)

    # autospec pins the (y_pred, y_true) signature so Trial dispatches on arity.
    def crit_sig(y_pred, y_true):
        return loss
    criterion = create_autospec(crit_sig)

    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0,
        torchbearer.BACKWARD_ARGS: {}, torchbearer.GENERATOR: generator
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None,
                              torchbearer.GENERATOR: generator}
    torchbearertrial._fit_pass(state)
    self.assertEqual(criterion.call_count, 3)
    self.assertEqual(criterion.call_args_list[0][0][0], 5)
    self.assertEqual(criterion.call_args_list[0][0][1].item(), 1.0)
@patch('torchbearer.CallbackListInjection')
def test_fit_criterion_passed_state(self, mock_inj):
    """A single-argument criterion should be called with the state dict itself.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    loss = torch.tensor([2.0], requires_grad=True)

    # autospec pins the single-argument (state) signature.
    def crit_sig(state):
        return loss
    criterion = create_autospec(crit_sig)

    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list, torchbearer.LOADER: None,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False}
    torchbearertrial._fit_pass(state)
    self.assertEqual(criterion.call_count, 3)
    self.assertEqual(criterion.call_args_list[0][0][0], state)
@patch('torchbearer.CallbackListInjection')
def test_fit_backward(self, mock_inj):
    """loss.backward() should be invoked once per batch."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    # Mock loss so the .backward() call itself can be counted.
    loss = MagicMock()
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.LOADER: None,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.INF_TRAIN_LOADING: False, torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(loss.backward.call_count, 3)
@patch('torchbearer.CallbackListInjection')
def test_fit_metrics_process(self, mock_inj):
    """metric_list.process should run once per batch during the fit pass."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False, torchbearer.BACKWARD_ARGS: {},
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(metric_list.process.call_count, 3)
@patch('torchbearer.CallbackListInjection')
def test_fit_metrics_final(self, mock_inj):
    """process_final runs once and its values appear in the returned METRICS.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False, torchbearer.BACKWARD_ARGS: {},
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float,
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    history = torchbearertrial._fit_pass(state)[torchbearer.METRICS]
    self.assertEqual(metric_list.process_final.call_count, 1)
    self.assertEqual(history['test'], 2)
@patch('torchbearer.CallbackListInjection')
def test_fit_stop_training(self, mock_inj):
    """With STOP_TRAINING set, only the first batch should be processed.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    train_steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.BACKWARD_ARGS: {},
        torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: train_steps, torchbearer.EPOCH: 0
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, train_steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    torchbearertrial._fit_pass(state)
    self.assertEqual(metric_list.process.call_count, 1)
@patch('torchbearer.CallbackListInjection')
def test_fit_iterator_none(self, mock_inj):
    """With no train generator, _fit_pass should leave ITERATOR as None.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    steps = 1
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer, torchbearer.BACKWARD_ARGS: {},
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: None, torchbearer.TRAIN_STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1]
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: None, torchbearer.CALLBACK_LIST: callback_list, torchbearer.TRAIN_DATA: (None, steps), torchbearer.LOADER: None}
    state = torchbearertrial._fit_pass(state)
    self.assertIsNone(state[torchbearer.ITERATOR])
@patch('torchbearer.CallbackListInjection')
def test_fit_state_values(self, mock_inj):
    """After a fit pass the state holds the iterator, prediction, loss and final metrics.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = 1
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    optimizer.step = lambda closure: closure()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.TRAIN_GENERATOR: generator, torchbearer.TRAIN_STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.INF_TRAIN_LOADING: False,
        torchbearer.BACKWARD_ARGS: {}
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.TRAIN_GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list,
                              torchbearer.TRAIN_DATA: (generator, steps), torchbearer.INF_TRAIN_LOADING: False, torchbearer.LOADER: None}
    state = torchbearertrial._fit_pass(state)
    self.assertIsNotNone(state[torchbearer.ITERATOR])
    self.assertEqual(state[torchbearer.Y_PRED], 5)
    self.assertEqual(state[torchbearer.LOSS].item(), 2)
    self.assertEqual(state[torchbearer.METRICS]['test'], 2)
class TestTestPass(TestCase):
@patch('torchbearer.CallbackListInjection')
def test_metric_reset(self, mock_inj):
    """_test_pass should reset the metric list exactly once.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(metric_list.reset.call_count, 1)
@patch('torchbearer.CallbackListInjection')
def test_callback_calls(self, mock_inj):
    """_test_pass fires validation hooks: start/end once, per-batch hooks 3 times.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(callback_list.on_start_validation.call_count, 1)
    self.assertEqual(callback_list.on_sample_validation.call_count, 3)
    self.assertEqual(callback_list.on_forward_validation.call_count, 3)
    self.assertEqual(callback_list.on_criterion_validation.call_count, 3)
    self.assertEqual(callback_list.on_step_validation.call_count, 3)
    self.assertEqual(callback_list.on_end_validation.call_count, 1)
@patch('torchbearer.CallbackListInjection')
def test_forward_no_state(self, mock_inj):
    """With pass_state False, _test_pass calls the model with just the batch input.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_standard
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(torchmodel.call_count, 3)
    self.assertEqual(torchmodel.call_args_list[0][0][0].item(), 1)
@patch('torchbearer.CallbackListInjection')
def test_forward_with_state(self, mock_inj):
    """With pass_state True, _test_pass calls the model with one keyword argument (the state).

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = True
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(torchmodel.call_count, 3)
    # call_args_list[0][1] is the kwargs dict of the first forward call.
    self.assertEqual(len(torchmodel.call_args_list[0][1]), 1)
@patch('torchbearer.CallbackListInjection')
def test_criterion(self, mock_inj):
    """_test_pass calls a (y_pred, y_true) criterion with the model output and target.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()

    # autospec pins the (y_pred, y_true) signature so Trial dispatches on arity.
    def spec_crit(y_pred, y_true):
        pass
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = create_autospec(spec_crit)
    criterion.return_value = loss

    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_standard
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(criterion.call_count, 3)
    self.assertEqual(criterion.call_args_list[0][0][0], 5)
    self.assertEqual(criterion.call_args_list[0][0][1].item(), 1.0)
@patch('torchbearer.CallbackListInjection')
def test_criterion_multiple_outputs(self, mock_inj):
    """Multiple model outputs and targets are unpacked positionally into the criterion.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), (torch.Tensor([1]), torch.Tensor([1]))),
            (torch.Tensor([2]), (torch.Tensor([2]), torch.Tensor([2]))),
            (torch.Tensor([3]), (torch.Tensor([3]), torch.Tensor([3]))), ]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = [5, 5]
    optimizer = MagicMock()

    # Four positional parameters: two predictions followed by two targets.
    def spec_crit(y_pred1, y_pred2, y_true1, y_true2):
        pass
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = create_autospec(spec_crit)
    criterion.return_value = loss

    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_standard
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(criterion.call_count, 3)
    self.assertEqual(criterion.call_args_list[0][0][0], 5)
    self.assertEqual(criterion.call_args_list[0][0][1], 5)
    self.assertEqual(criterion.call_args_list[0][0][2].item(), 1.0)
    self.assertEqual(criterion.call_args_list[0][0][3].item(), 1.0)
@patch('torchbearer.CallbackListInjection')
def test_criterion_passed_state(self, mock_inj):
    """A single-argument criterion in _test_pass receives the state dict itself.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()

    # autospec pins the single-argument (state) signature.
    def spec_crit(state):
        pass
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = create_autospec(spec_crit)
    criterion.return_value = loss

    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu', torchbearer.LOADER: None,
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_standard
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list, torchbearer.LOADER: None}
    torchbearertrial._test_pass(state)
    self.assertEqual(criterion.call_count, 3)
    self.assertEqual(criterion.call_args_list[0][0][0], state)
@patch('torchbearer.CallbackListInjection')
def test_metric_process(self, mock_inj):
    """metric_list.process should run once per batch during _test_pass.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(metric_list.process.call_count, 3)
@patch('torchbearer.CallbackListInjection')
def test_metric_final(self, mock_inj):
    """process_final runs once and its values appear in the METRICS of the returned state.

    Uses @patch instead of assigning torchbearer.CallbackListInjection directly,
    so the module attribute is restored after the test instead of leaking.
    """
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    mock_inj.return_value = callback_list
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: False, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    history = torchbearertrial._test_pass(state)
    self.assertEqual(metric_list.process_final.call_count, 1)
    self.assertEqual(history[torchbearer.METRICS]['test'], 2)
def test_stop_training(self):
    """With STOP_TRAINING already set, _test_pass should stop after one batch (process called once)."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = len(data)
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    # NOTE(review): rebinds the module attribute globally and never restores it — confirm no cross-test leakage.
    torchbearer.CallbackListInjection = Mock(return_value=callback_list)
    # STOP_TRAINING: True is the condition under test; everything else mirrors the sibling tests.
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    torchbearertrial._test_pass(state)
    self.assertEqual(metric_list.process.call_count, 1)
def test_iterator_none(self):
    """With no generator in state, _test_pass should leave ITERATOR as None."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    # NOTE(review): generator is built but never used — the state below deliberately carries GENERATOR: None.
    generator = DataLoader(data)
    steps = 1
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}
    callback_list = MagicMock()
    # NOTE(review): rebinds the module attribute globally and never restores it — confirm no cross-test leakage.
    torchbearer.CallbackListInjection = Mock(return_value=callback_list)
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: None, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: None, torchbearer.CALLBACK_LIST: callback_list}
    state = torchbearertrial._test_pass(state)
    self.assertTrue(state[torchbearer.ITERATOR] is None)
def test_state_values(self):
    """After _test_pass the returned state should hold the iterator, last prediction, loss and final metrics."""
    data = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])),
            (torch.Tensor([3]), torch.Tensor([3]))]
    generator = DataLoader(data)
    steps = 1
    epochs = 1
    torchmodel = MagicMock()
    torchmodel.return_value = 5  # expected Y_PRED
    optimizer = MagicMock()
    loss = torch.tensor([2.0], requires_grad=True)  # expected LOSS
    criterion = Mock(return_value=loss)
    metric_list = MagicMock()
    metric_list.process.return_value = {'test': 0}
    metric_list.process_final.return_value = {'test': 2}  # expected final METRICS
    callback_list = MagicMock()
    # NOTE(review): rebinds the module attribute globally and never restores it — confirm no cross-test leakage.
    torchbearer.CallbackListInjection = Mock(return_value=callback_list)
    state = make_state[
        torchbearer.MAX_EPOCHS: epochs, torchbearer.STOP_TRAINING: True, torchbearer.MODEL: torchmodel, torchbearer.CRITERION: criterion,
        torchbearer.OPTIMIZER: optimizer,
        torchbearer.METRIC_LIST: metric_list, torchbearer.CALLBACK_LIST: callback_list, torchbearer.DEVICE: 'cpu',
        torchbearer.DATA_TYPE: torch.float, torchbearer.HISTORY: [], torchbearer.GENERATOR: generator, torchbearer.STEPS: steps, torchbearer.EPOCH: 0,
        torchbearer.X: data[0][0], torchbearer.Y_TRUE: data[0][1], torchbearer.SAMPLER: load_batch_none
    ]
    torchbearertrial = Trial(torchmodel, optimizer, criterion, [], callbacks=[])
    torchbearertrial.train = Mock()
    torchbearertrial.pass_state = False
    torchbearertrial.state = {torchbearer.GENERATOR: generator, torchbearer.CALLBACK_LIST: callback_list}
    state = torchbearertrial._test_pass(state)
    self.assertTrue(state[torchbearer.ITERATOR] is not None)
    self.assertTrue(state[torchbearer.Y_PRED] == 5)
    self.assertTrue(state[torchbearer.LOSS].item() == 2)
    self.assertTrue(state[torchbearer.METRICS]['test'] == 2)
class TestTrialValEvalPred(TestCase):
    """Tests for Trial's validation/evaluation/prediction entry points
    (_validation_pass, evaluate and predict), all driven through mocks."""

    def test_validation_pass(self):
        """_validation_pass should switch to eval mode, run _test_pass over the validation data and return METRICS."""
        generator = MagicMock()
        steps = 5
        # NOTE(review): rebinds the module attribute globally and never restores it — confirm no cross-test leakage.
        torchbearer.CallbackListInjection = Mock()
        state = {torchbearer.VALIDATION_GENERATOR: generator, torchbearer.VALIDATION_STEPS: steps, torchbearer.METRICS: 1}
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        train_mock = t.train = Mock()
        test_pass_mock = t._test_pass = Mock()
        t.state = {torchbearer.VALIDATION_GENERATOR: generator, torchbearer.CALLBACK_LIST: None,
                   torchbearer.VALIDATION_DATA: (generator, steps), torchbearer.LOADER: None}
        metrics = t._validation_pass(state)
        self.assertEqual(eval_mock.call_count, 1)
        self.assertEqual(test_pass_mock.call_count, 1)
        # _test_pass receives a state carrying the validation generator and step count
        test_pass_state = test_pass_mock.call_args[0][0]
        self.assertTrue(test_pass_state[torchbearer.GENERATOR] == generator)
        self.assertTrue(test_pass_state[torchbearer.STEPS] == steps)
        self.assertTrue(metrics == 1)

    def test_validation_pass_none(self):
        """With no validation generator/steps, _validation_pass should not switch to eval mode."""
        generator = None
        steps = None
        torchbearer.CallbackListInjection = Mock()
        state = {torchbearer.VALIDATION_GENERATOR: generator, torchbearer.VALIDATION_STEPS: steps, torchbearer.METRICS: 1}
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        t._test_pass = Mock()
        t.state = {torchbearer.VALIDATION_GENERATOR: generator, torchbearer.CALLBACK_LIST: None,
                   torchbearer.VALIDATION_DATA: (generator, steps), torchbearer.LOADER: None}
        t._validation_pass(state)
        self.assertTrue(eval_mock.call_count == 0)

    def test_evaluate(self):
        """evaluate() runs a test pass over validation data, fires start/end(+epoch) callbacks and
        merges the evaluated metrics into the newest history entry."""
        generator = MagicMock()
        steps = 5
        torchbearer.CallbackListInjection = Mock()
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        clist = MagicMock()
        state = {torchbearer.HISTORY: [{'train_steps': 'steps', 'train_metric': 2}], torchbearer.VALIDATION_GENERATOR: generator,
                 torchbearer.CALLBACK_LIST: clist, torchbearer.VALIDATION_STEPS: steps, torchbearer.VALIDATION_DATA: (generator, steps),
                 torchbearer.METRICS: {'val_metric': 1}, torchbearer.LOADER: None}
        test_pass_mock = t._test_pass = Mock(return_value=state)
        t.state = state
        metrics = t.evaluate()
        self.assertEqual(clist.on_start.call_count, 1)
        self.assertEqual(clist.on_start_epoch.call_count, 1)
        self.assertEqual(clist.on_end_epoch.call_count, 1)
        self.assertEqual(clist.on_end.call_count, 1)
        self.assertEqual(eval_mock.call_count, 1)
        self.assertEqual(test_pass_mock.call_count, 1)
        test_pass_state = test_pass_mock.call_args[0][0]
        self.assertTrue(test_pass_state[torchbearer.GENERATOR] == generator)
        self.assertTrue(test_pass_state[torchbearer.STEPS] == steps)
        self.assertEqual(metrics['val_metric'], 1)
        # val metrics are folded into the existing (last) history record
        self.assertDictEqual(state[torchbearer.HISTORY][0], {'train_steps': 'steps', 'train_metric': 2, 'val_metric': 1})

    def test_evaluate_none(self):
        """evaluate() with no validation data is a no-op: eval is never entered."""
        generator = None
        steps = None
        torchbearer.CallbackListInjection = Mock()
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        test_pass_mock = t._test_pass = Mock(return_value={torchbearer.METRICS: 1})
        t.state = {torchbearer.VALIDATION_GENERATOR: generator, torchbearer.CALLBACK_LIST: None,
                   torchbearer.VALIDATION_STEPS: steps, torchbearer.VALIDATION_DATA: (generator, steps), torchbearer.LOADER: None}
        metrics = t.evaluate()
        self.assertTrue(eval_mock.call_count == 0)

    def test_predict(self):
        """predict() runs a test pass over the test data, fires start/end(+epoch) callbacks and
        returns FINAL_PREDICTIONS from the pass result."""
        generator = MagicMock()
        steps = 5
        torchbearer.CallbackListInjection = Mock()
        state = {torchbearer.TEST_GENERATOR: generator, torchbearer.TEST_STEPS: steps, torchbearer.METRICS: 1}
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        test_pass_mock = t._test_pass = Mock(return_value={torchbearer.FINAL_PREDICTIONS: 1})
        clist = MagicMock()
        t.state = {torchbearer.TEST_GENERATOR: generator, torchbearer.CALLBACK_LIST: clist, torchbearer.TEST_STEPS: steps,
                   torchbearer.TEST_DATA: (generator, steps), torchbearer.LOADER: None}
        metrics = t.predict()
        self.assertEqual(clist.on_start.call_count, 1)
        self.assertEqual(clist.on_start_epoch.call_count, 1)
        self.assertEqual(clist.on_end_epoch.call_count, 1)
        self.assertEqual(clist.on_end.call_count, 1)
        self.assertEqual(eval_mock.call_count, 1)
        self.assertEqual(test_pass_mock.call_count, 1)
        test_pass_state = test_pass_mock.call_args[0][0]
        self.assertTrue(test_pass_state[torchbearer.GENERATOR] == generator)
        self.assertTrue(test_pass_state[torchbearer.STEPS] == steps)
        self.assertTrue(metrics == 1)

    def test_predict_none(self):
        """predict() with no test data is a no-op: eval is never entered."""
        generator = None
        steps = None
        torchbearer.CallbackListInjection = Mock()
        state = {torchbearer.TEST_GENERATOR: generator, torchbearer.TEST_STEPS: steps, torchbearer.METRICS: 1}
        t = Trial(MagicMock())
        eval_mock = t.eval = Mock()
        test_pass_mock = t._test_pass = Mock(return_value={torchbearer.FINAL_PREDICTIONS: 1})
        t.state = {torchbearer.TEST_GENERATOR: generator, torchbearer.CALLBACK_LIST: None, torchbearer.TEST_STEPS: steps,
                   torchbearer.TEST_DATA: (generator, steps), torchbearer.LOADER: None}
        metrics = t.predict()
        self.assertTrue(eval_mock.call_count == 0)
class TestReplay(TestCase):
    """Tests for Trial.replay, which re-fires callbacks over the recorded history
    (10 epochs of 10 train / 5 validation steps unless a test says otherwise)."""

    @patch('torchbearer.trial.Tqdm')
    def test_replay_tqdm(self, tq):
        """Default verbosity: replay constructs a Tqdm printer."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback])
        self.assertEqual(tq.call_count, 1)

    @patch('torchbearer.trial.Tqdm')
    def test_replay_no_tqdm(self, tq):
        """verbose=0: no Tqdm printer is constructed."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0)
        tq.assert_not_called()

    @patch('torchbearer.trial.Tqdm')
    def test_replay_multi_call(self, mock_tqdm):
        """A verbose replay followed by a silent one must not construct Tqdm the second time."""
        t = Trial(MagicMock())
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i + 1} for i in range(1)]
        t.state[torchbearer.HISTORY] = history
        t.replay(verbose=2)
        mock_tqdm.reset_mock()
        callback = MagicMock()
        t.replay(callbacks=[callback], verbose=0)
        mock_tqdm.assert_not_called()

    def test_replay_callback_calls(self):
        """Callbacks see one on_start, and on_sample / on_sample_validation once per recorded step."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0)
        self.assertEqual(callback.on_start.call_count, 1)
        self.assertTrue(callback.on_sample.call_count == 100)           # 10 epochs * 10 train steps
        self.assertTrue(callback.on_sample_validation.call_count == 50)  # 10 epochs * 5 val steps

    def test_replay_none_train_steps(self):
        """train_steps of None replays no training samples but still replays validation."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': None, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0)
        self.assertEqual(callback.on_start.call_count, 1)
        self.assertTrue(callback.on_sample.call_count == 0)
        self.assertTrue(callback.on_sample_validation.call_count == 50)

    def test_replay_none_validation_steps(self):
        """validation_steps of None replays training samples only."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': None, 'test': i} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0)
        self.assertEqual(callback.on_start.call_count, 1)
        self.assertTrue(callback.on_sample.call_count == 100)
        self.assertTrue(callback.on_sample_validation.call_count == 0)

    def test_replay_one_batch_true(self):
        """one_batch=True replays a single train and a single validation sample."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(1)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0, one_batch=True)
        self.assertTrue(callback.on_start.call_count == 1)
        self.assertTrue(callback.on_sample.call_count == 1)
        self.assertTrue(callback.on_sample_validation.call_count == 1)

    def test_replay_metrics(self):
        """Replayed state carries the recorded metrics; 'val_'-prefixed keys surface on validation samples."""
        t = Trial(MagicMock())
        callback = MagicMock()
        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback], verbose=0)
        self.assertTrue(callback.on_sample.call_args_list[0][0][0][torchbearer.METRICS]['test'] == 9)
        self.assertTrue(callback.on_sample_validation.call_args_list[0][0][0][torchbearer.METRICS]['val_test2'] == 10)

    def test_replay_stop_training(self):
        """Setting STOP_TRAINING on a train sample halts the replay before validation."""
        t = Trial(MagicMock())
        callback = MagicMock()

        @torchbearer.callbacks.on_sample
        def stop_training(state):
            state[torchbearer.STOP_TRAINING] = True

        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback, stop_training], verbose=0)
        # the stop fires after the first epoch's train samples have been seen
        self.assertTrue(callback.on_sample.call_count == 10)
        callback.on_sample_validation.assert_not_called()

    def test_replay_stop_training_on_validation(self):
        """Setting STOP_TRAINING on a validation sample halts the replay after that sample."""
        t = Trial(MagicMock())
        callback = MagicMock()

        @torchbearer.callbacks.on_sample_validation
        def stop_training(state):
            state[torchbearer.STOP_TRAINING] = True

        history = [{'train_steps': 10, 'validation_steps': 5, 'test': i, 'val_test2': i+1} for i in range(10)]
        t.state[torchbearer.HISTORY] = history
        t.replay(callbacks=[callback, stop_training], verbose=0)
        self.assertTrue(callback.on_sample_validation.call_count == 1)
class TestTrialMembers(TestCase):
    """Tests for Trial's basic members: construction, string forms, train/eval
    mode switching, device/dtype moves and state_dict/load_state_dict."""

    def test_init_none_criterion(self):
        """criterion=None gives a default criterion returning a zero loss on the Trial's device/dtype."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        optimizer = MagicMock()
        metric = MagicMock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [metric], []).to('cpu', torch.float64)
        loss = torchbearertrial.state[torchbearer.CRITERION](None, None)
        self.assertTrue(str(loss.device) == 'cpu')
        self.assertTrue(loss.dtype == torch.float64)
        self.assertTrue(torch.is_tensor(loss))
        self.assertTrue(loss.shape == torch.Size([1]))
        self.assertTrue(loss.item() == 0)

    def test_init_none_criterion_add(self):
        """The default zero loss supports tensor arithmetic without losing device/dtype."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        optimizer = MagicMock()
        metric = MagicMock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [metric], []).to('cpu', torch.float64)
        loss = torchbearertrial.state[torchbearer.CRITERION](None, None)
        loss = loss + 1
        self.assertTrue(str(loss.device) == 'cpu')
        self.assertTrue(loss.dtype == torch.float64)
        self.assertTrue(torch.is_tensor(loss))
        self.assertTrue(loss.shape == torch.Size([1]))
        self.assertTrue(loss.item() == 1)

    def test_str(self):
        """__str__ pretty-prints optimizer, criterion, metrics, callbacks and model; on_init fires once."""
        torchmodel = "mod"
        optimizer = "opt"
        metric = torchbearer.metrics.Metric('met')
        cb = torchbearer.callbacks.Callback()
        cb.on_init = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, "crit", [metric], [cb])
        # NOTE(review): the 'OPTIMZER' typo matches the banner the library emits — do not "fix" it here.
        correct_string = "--------------------- OPTIMZER ---------------------\nopt\n\n-------------------- CRITERION ---------------------\ncrit\n\n--------------------- METRICS ----------------------\n['met']\n\n-------------------- CALLBACKS ---------------------\n['torchbearer.bases.Callback']\n\n---------------------- MODEL -----------------------\nmod\n\n"
        self.assertEqual(str(torchbearertrial), correct_string)
        self.assertEqual(cb.on_init.call_count, 1)

    def test_repr(self):
        """repr() and str() agree."""
        torchmodel = "mod"
        optimizer = "opt"
        metric = torchbearer.metrics.Metric('met')
        torchbearertrial = Trial(torchmodel, optimizer, "crit", [metric], [torchbearer.callbacks.Callback()])
        self.assertEqual(str(torchbearertrial), repr(torchbearertrial))

    def test_train(self):
        """train() puts the model in training mode and forwards to the metrics."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        optimizer = MagicMock()
        metric = MagicMock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [metric], [])
        torchbearertrial.train()
        self.assertTrue(torchbearertrial.state[torchbearer.MODEL].training == True)
        self.assertEqual(metric.train.call_count, 1)

    def test_eval(self):
        """eval() puts the model in evaluation mode and forwards to the metrics."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        optimizer = MagicMock()
        metric = MagicMock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [metric], [])
        torchbearertrial.eval()
        self.assertTrue(torchbearertrial.state[torchbearer.MODEL].training == False)
        self.assertEqual(metric.eval.call_count, 1)

    def test_to_both_args(self):
        """to(device, dtype) forwards both positionals to the model and optimizer state tensors."""
        dev = 'cuda:1'
        dtype = torch.float16
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.to = Mock()
        optimizer = torch.optim.Adam(torchmodel.parameters(), 0.1)
        state_tensor = torch.Tensor([1])
        state_tensor.to = Mock()
        optimizer.state = {'test': {'test': state_tensor}}
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to(dev, dtype)
        self.assertTrue(torchmodel.to.call_args[0][0] == dev)
        self.assertTrue(torchmodel.to.call_args[0][1] == dtype)
        self.assertTrue(state_tensor.to.call_args[0][0] == dev)
        self.assertTrue(state_tensor.to.call_args[0][1] == dtype)

    def test_to_only_device(self):
        """to(device) forwards the single positional everywhere."""
        dev = 'cuda:1'
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.to = Mock()
        optimizer = torch.optim.Adam(torchmodel.parameters(), 0.1)
        state_tensor = torch.Tensor([1])
        state_tensor.to = Mock()
        optimizer.state = {'test': {'test': state_tensor}}
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to(dev)
        self.assertTrue(torchmodel.to.call_args[0][0] == dev)
        self.assertTrue(state_tensor.to.call_args[0][0] == dev)

    def test_to_only_dtype(self):
        """to(dtype) forwards the single positional everywhere."""
        dtype = torch.float16
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.to = Mock()
        optimizer = torch.optim.Adam(torchmodel.parameters(), 0.1)
        state_tensor = torch.Tensor([1])
        state_tensor.to = Mock()
        optimizer.state = {'test': {'test': state_tensor}}
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to(dtype)
        self.assertTrue(torchmodel.to.call_args[0][0] == dtype)
        self.assertTrue(state_tensor.to.call_args[0][0] == dtype)

    def test_to_kwargs(self):
        """to(device=..., dtype=...) forwards keyword arguments unchanged."""
        dev = 'cuda:1'
        dtype = torch.float16
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.to = Mock()
        optimizer = torch.optim.Adam(torchmodel.parameters(), 0.1)
        state_tensor = torch.Tensor([1])
        state_tensor.to = Mock()
        optimizer.state = {'test': {'test': state_tensor}}
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to(device=dev, dtype=dtype)
        self.assertTrue(torchmodel.to.call_args[1]['device'] == dev)
        self.assertTrue(torchmodel.to.call_args[1]['dtype'] == dtype)
        self.assertTrue(state_tensor.to.call_args[1]['device'] == dev)
        self.assertTrue(state_tensor.to.call_args[1]['dtype'] == dtype)

    @patch('torch.cuda.current_device')
    def test_cuda_no_device(self, device_mock):
        """cuda() with no device argument targets torch.cuda.current_device()."""
        device_mock.return_value = 111
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to = Mock()
        torchbearertrial.cuda()
        self.assertTrue(torchbearertrial.to.call_args[0][0] == 'cuda:' + str(111))

    def test_cuda_with_device(self):
        """cuda(device=...) targets the requested device index."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to = Mock()
        torchbearertrial.cuda(device='2')
        self.assertTrue(torchbearertrial.to.call_args[0][0] == 'cuda:2')

    def test_cpu(self):
        """cpu() delegates to to('cpu')."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.to = Mock()
        torchbearertrial.cpu()
        self.assertTrue(torchbearertrial.to.call_args[0][0] == 'cpu')

    def test_load_state_dict_resume(self):
        """Default (resume) restore: model, optimizer, callback list and history all round-trip,
        and extra keyword arguments are forwarded to the model's load_state_dict."""
        key_words = {'strict': True}
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        torch_state = torchmodel.state_dict()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        optimizer_state = optimizer.state_dict()
        callback_list = MagicMock()
        callback_list.state_dict = Mock(return_value=1)
        history = ['test']
        torchbearertrial = Trial(torchmodel, optimizer, None, [], [])
        torchbearertrial.state[torchbearer.CALLBACK_LIST] = callback_list
        torchbearertrial.state[torchbearer.HISTORY] = history
        torchbearer_state = torchbearertrial.state_dict()
        torchbearertrial.state[torchbearer.HISTORY] = 'Wrong'  # clobber, then expect the restore to undo it
        torchbearertrial.load_state_dict(torchbearer_state, **key_words)
        self.assertTrue(torchmodel.load_state_dict.call_args[0][0] == torch_state)
        # (a duplicated copy of this assertion was removed)
        self.assertTrue(optimizer.load_state_dict.call_args[0][0] == optimizer_state)
        self.assertTrue(callback_list.load_state_dict.call_args[0][0] == 1)
        self.assertTrue(torchbearertrial.state[torchbearer.HISTORY] == history)
        self.assertEqual(torchbearertrial.state[torchbearer.MODEL].load_state_dict.call_count, 1)
        self.assertEqual(torchbearertrial.state[torchbearer.OPTIMIZER].load_state_dict.call_count, 1)
        self.assertEqual(torchbearertrial.state[torchbearer.CALLBACK_LIST].load_state_dict.call_count, 1)
        self.assertTrue(torchmodel.load_state_dict.call_args[1] == key_words)

    def test_load_state_dict_no_resume(self):
        """resume=False restores only the model weights; optimizer and history are untouched."""
        key_words = {'strict': True}
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        torch_state = torchmodel.state_dict()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        optimizer_state = optimizer.state_dict()
        history = ['test']
        torchbearertrial = Trial(torchmodel, optimizer, None, [], [])
        torchbearertrial.state[torchbearer.HISTORY] = history
        torchbearer_state = torchbearertrial.state_dict()
        torchbearertrial.state[torchbearer.HISTORY] = 'Wrong'
        torchbearertrial.load_state_dict(torchbearer_state, resume=False, **key_words)
        # '==' not 'is': identity comparison with a str literal is a SyntaxWarning (and CPython-specific)
        self.assertTrue(torchbearertrial.state[torchbearer.HISTORY] == 'Wrong')
        self.assertEqual(torchbearertrial.state[torchbearer.MODEL].load_state_dict.call_count, 1)
        self.assertTrue(torchbearertrial.state[torchbearer.OPTIMIZER].load_state_dict.call_count == 0)
        self.assertTrue(torchmodel.load_state_dict.call_args[1] == key_words)

    def test_load_state_dict_wrong_version(self):
        """Loading a state saved by a different library version warns with a UserWarning."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [], [])
        torchbearer_state = torchbearertrial.state_dict()
        torchbearer_state[torchbearer.VERSION] = '0.1.7'  # Old version
        import warnings
        with warnings.catch_warnings(record=True) as w:
            torchbearertrial.load_state_dict(torchbearer_state, resume=True)
            self.assertTrue(len(w) == 1)
            self.assertTrue(issubclass(w[-1].category, UserWarning))

    def test_load_state_dict_not_torchbearer(self):
        """A raw (non-torchbearer) state dict warns but is still loaded into the model only."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel.load_state_dict = Mock()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer.load_state_dict = Mock()
        torchbearertrial = Trial(torchmodel, optimizer, None, [], [])
        torchbearer_state = torchbearertrial.state_dict()
        torchbearer_state[torchbearer.VERSION] = '0.1.7'  # Old version
        import warnings
        with warnings.catch_warnings(record=True) as w:
            # pass only the model sub-dict, as if loading a plain torch checkpoint
            torchbearertrial.load_state_dict(torchbearer_state[torchbearer.MODEL])
            self.assertTrue(len(w) == 1)
            self.assertTrue(issubclass(w[-1].category, UserWarning))
        self.assertEqual(torchmodel.load_state_dict.call_count, 1)
        optimizer.load_state_dict.assert_not_called()

    def test_state_dict(self):
        """state_dict() bundles version, model, optimizer, callback list and history."""
        torchmodel = torch.nn.Sequential(torch.nn.Linear(1, 1))
        torchmodel_state = torchmodel.state_dict()
        optimizer = torch.optim.SGD(torchmodel.parameters(), 0.1)
        optimizer_state = optimizer.state_dict()
        callback_list = MagicMock()
        callback_list.state_dict = Mock(return_value=1)
        history = ['test']
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.state[torchbearer.HISTORY] = history
        torchbearertrial.state[torchbearer.CALLBACK_LIST] = callback_list
        torchbearer_state = torchbearertrial.state_dict()
        self.assertTrue(torchbearer_state[torchbearer.VERSION] == torchbearer.__version__.replace('.dev', ''))
        self.assertTrue(torchbearer_state[torchbearer.MODEL] == torchmodel_state)
        self.assertTrue(torchbearer_state[torchbearer.OPTIMIZER] == optimizer_state)
        self.assertTrue(torchbearer_state[torchbearer.CALLBACK_LIST] == 1)
        self.assertTrue(torchbearer_state[torchbearer.HISTORY] == history)

    def test_state_dict_kwargs(self):
        """state_dict kwargs go to the model's state_dict only, not the optimizer's."""
        keywords = {'destination': None, 'prefix': '', 'keep_vars': False}
        torchmodel = MagicMock()
        optimizer = MagicMock()
        torchbearertrial = Trial(torchmodel, optimizer, torch.nn.L1Loss(), [])
        torchbearertrial.state_dict(**keywords)
        self.assertTrue(torchmodel.state_dict.call_args[1] == keywords)
        self.assertTrue(optimizer.state_dict.call_args[1] == {})
class TestTrialFunctions(TestCase):
@patch('torchbearer.trial.Tqdm')
def test_get_printer_no_tqdm(self, tq):
    """With verbose=0, get_printer must not construct a Tqdm printer."""
    torchbearer.trial.get_printer(verbose=0, validation_label_letter='v')
    tq.assert_not_called()
@patch('torchbearer.trial.Tqdm')
def test_get_printer_verbose_1(self, tq):
    """verbose=1 builds a Tqdm printer in once-per-epoch mode."""
    torchbearer.trial.get_printer(verbose=1, validation_label_letter='v')
    tq.assert_called_once_with(on_epoch=True, validation_label_letter='v')
@patch('torchbearer.trial.Tqdm')
def test_get_printer_verbose_2(self, tq):
    """verbose=2 builds a per-batch Tqdm printer (no on_epoch flag)."""
    torchbearer.trial.get_printer(verbose=2, validation_label_letter='v')
    tq.assert_called_once_with(validation_label_letter='v')
@patch('torchbearer.trial.Tqdm')
def test_get_printer_letter(self, tq):
    """The validation label letter is forwarded verbatim to the Tqdm printer."""
    torchbearer.trial.get_printer(verbose=2, validation_label_letter='r')
    tq.assert_called_once_with(validation_label_letter='r')
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_no_tqdm(self, c_inj, get_print_mock):
    """@inject_printer wraps the call in a CallbackListInjection and asks get_printer for the verbose=0 printer."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('v')
        def test_func(self, verbose=0):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func(verbose=0)
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='v', verbose=0)
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_no_kwargs(self, c_inj, get_print_mock):
    """@inject_printer picks up verbose when it is passed positionally rather than as a keyword."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('v')
        def test_func(self, verbose=0):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func(1)  # positional verbose=1
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='v', verbose=1)
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_both(self, c_inj, get_print_mock):
    """@inject_printer stacked on @inject_sampler still injects exactly one callback list and one printer."""
    callback_list = torchbearer.callbacks.CallbackList([])
    generator = MagicMock()
    steps = None

    class SomeClass:
        @torchbearer.inject_printer('v')
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_standard)
        def test_func(self, verbose=0):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list, torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: None}
    t.test_func(1)
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='v', verbose=1)
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_tqdm_on_epoch(self, c_inj, get_print_mock):
    """verbose=1 at the call site reaches get_printer unchanged."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('t')
        def test_func(self, verbose=0):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func(verbose=1)
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='t', verbose=1)
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_tqdm_on_batch(self, c_inj, get_print_mock):
    """verbose=2 at the call site reaches get_printer unchanged."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('t')
        def test_func(self, verbose=0):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func(verbose=2)
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='t', verbose=2)
@patch('torchbearer.trial.get_printer')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_tqdm_default(self, c_inj, get_print_mock):
    """When verbose is not supplied, the wrapped function's own default (here 2) is used."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('t')
        def test_func(self, verbose=2):
            pass

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func()
    self.assertEqual(c_inj.call_count, 1)
    get_print_mock.assert_called_once_with(validation_label_letter='t', verbose=2)
@patch('torchbearer.trial.Tqdm')
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_printer_injection(self, c_inj, tq):
    """The injection is active inside the wrapped call and the original callback list is restored after."""
    callback_list = torchbearer.callbacks.CallbackList([])

    class SomeClass:
        @torchbearer.inject_printer('v')
        def test_func(self_inner, verbose=0):
            # assertion runs *inside* the wrapped call, while the injection is live
            self.assertEqual(c_inj.call_count, 1)

    t = SomeClass()
    t.state = {torchbearer.CALLBACK_LIST: callback_list}
    t.test_func()
    self.assertTrue(t.state[torchbearer.CALLBACK_LIST] == callback_list)
def test_inject_sampler_standard(self):
    """With a real generator and no custom loader, the standard batch loader is installed as SAMPLER."""
    generator = MagicMock()
    steps = None

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_standard)
        def test_func(self):
            pass

    t = SomeClass()
    t.state = {torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: None}
    t.test_func()
    self.assertTrue(t.state[torchbearer.SAMPLER] == torchbearer.trial.load_batch_standard)
def test_inject_sampler_none(self):
    """With no generator, the none batch loader is installed as SAMPLER instead."""
    generator = None
    steps = None

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_standard)
        def test_func(self):
            pass

    t = SomeClass()
    t.state = {torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: None}
    t.test_func()
    self.assertTrue(t.state[torchbearer.SAMPLER] == torchbearer.trial.load_batch_none)
def test_inject_sampler_predict(self):
    """The decorator installs whichever loader it was given — here the predict batch loader."""
    generator = MagicMock()
    steps = None

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_predict)
        def test_func(self):
            pass

    t = SomeClass()
    t.state = {torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: None}
    t.test_func()
    self.assertTrue(t.state[torchbearer.SAMPLER] == torchbearer.trial.load_batch_predict)
def test_inject_sampler_custom(self):
    """A user-supplied LOADER takes priority over the decorator's default loader."""
    generator = MagicMock()
    steps = None

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_predict)
        def test_func(self):
            pass

    def some_loader(state):
        return 'test'

    t = SomeClass()
    t.state = {torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: some_loader}
    t.test_func()
    self.assertTrue(t.state[torchbearer.SAMPLER] == some_loader)
@patch('warnings.warn')
@patch('torchbearer.trial.load_batch_infinite')
def test_inject_sampler_infinite(self, mock_lbi, _):
    """steps=-1 switches to infinite loading: the base loader is wrapped by load_batch_infinite.
    (warnings.warn is patched out — presumably a warning is emitted for the -1 sentinel; verify.)"""
    generator = MagicMock()
    steps = -1

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_predict)
        def test_func(self):
            pass

    t = SomeClass()
    t.state = {torchbearer.GENERATOR: (generator, steps), torchbearer.LOADER: None}
    t.test_func()
    self.assertTrue(mock_lbi.call_args[0][0] == load_batch_predict)
@patch('torchbearer.trial.load_batch_infinite')
def test_inject_sampler_infinite_standard_loader(self, mock_lbi):
    """INF_TRAIN_LOADING wraps the standard loader in load_batch_infinite and, for a plain
    iterable generator, caches an iterator on it as tb_iter — TODO confirm tb_iter semantics."""
    class EmptyObj:  # Mocks don't play well with hasattr so need an empty object
        def __len__(self):
            return 100

        def __iter__(self):
            return self

        def __next__(self):
            return None

    generator = EmptyObj()
    steps = 10

    class SomeClass:
        @torchbearer.inject_sampler(torchbearer.TRAIN_DATA, load_batch_standard)
        def test_func(self):
            pass

    t = SomeClass()
    t.state = {torchbearer.TRAIN_DATA: (generator, steps), torchbearer.INF_TRAIN_LOADING: True, torchbearer.LOADER: None}
    t.test_func()
    self.assertTrue(mock_lbi.call_args[0][0] == load_batch_standard)
    self.assertTrue(generator.tb_iter)
@patch('torchbearer.trial.load_batch_infinite')
def test_inject_sampler_infinite_train_loading(self, mock_lbi):
generator = MagicMock()
generator.__len__.return_value = 10
steps = 5
class SomeClass:
@torchbearer.inject_sampler(torchbearer.TRAIN_DATA, load_batch_standard)
def test_func(self):
pass
t = SomeClass()
t.state = {torchbearer.TRAIN_DATA: (generator, steps), torchbearer.INF_TRAIN_LOADING: True, torchbearer.LOADER: None}
t.test_func()
self.assertTrue(mock_lbi.call_args[0][0] == load_batch_standard)
def test_inject_sampler_data_key(self):
generator = MagicMock()
test_generator = 'test'
test_steps = 1
class SomeClass:
@torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_predict)
def test_func(self, data_key=None):
pass
t = SomeClass()
t.state = {torchbearer.GENERATOR: (generator, None), torchbearer.TEST_GENERATOR: (test_generator, test_steps), torchbearer.LOADER: None}
t.test_func(data_key=torchbearer.TEST_GENERATOR)
self.assertTrue(t.state[torchbearer.GENERATOR] == test_generator)
self.assertTrue(t.state[torchbearer.STEPS] == test_steps)
def test_inject_sampler_data_key_no_kwargs(self):
generator = MagicMock()
test_generator = 'test'
test_steps = 1
class SomeClass:
@torchbearer.inject_sampler(torchbearer.GENERATOR, load_batch_predict)
def test_func(self, data_key=None):
pass
t = SomeClass()
t.state = {torchbearer.GENERATOR: (generator, None), torchbearer.TEST_GENERATOR: (test_generator, test_steps), torchbearer.LOADER: None}
t.test_func(torchbearer.TEST_GENERATOR)
self.assertTrue(t.state[torchbearer.GENERATOR] == test_generator)
self.assertTrue(t.state[torchbearer.STEPS] == test_steps)
@patch('torchbearer.trial.CallbackListInjection')
def test_inject_callback(self, c_inj):
callback_list = torchbearer.callbacks.CallbackList([])
test_callback = MagicMock()
class SomeClass:
@torchbearer.inject_callback(test_callback)
def test_func(self_inner):
self.assertEqual(c_inj.call_count, 1)
t = SomeClass()
t.state = {torchbearer.CALLBACK_LIST: callback_list}
t.test_func()
self.assertTrue(c_inj.call_args[0][0] == test_callback)
def test_deep_to_tensor(self):
base_tensor = torch.Tensor([1])
tensor = MagicMock(spec=base_tensor)
new_dtype = torch.float16
new_device = 'cuda:1'
deep_to(tensor, new_device, new_dtype)
self.assertTrue(tensor.to.call_args[0][0] == new_device)
self.assertTrue(tensor.to.call_args[0][1] == new_dtype)
def test_deep_to_tensor_int_dtype(self):
base_tensor = torch.Tensor([1])
tensor = MagicMock(spec=base_tensor)
tensor.dtype = torch.uint8
new_device = 'cuda:1'
new_dtype = torch.uint8
deep_to(tensor, new_device, new_dtype)
self.assertTrue(tensor.to.call_args[0][0] == new_device)
self.assertTrue(len(tensor.to.call_args[0]) == 1)
def test_deep_to_list(self):
base_tensor = torch.Tensor([1])
tensor_1 = MagicMock(spec=base_tensor)
tensor_2 = MagicMock(spec=base_tensor)
tensors = [tensor_1, tensor_2]
new_dtype = torch.float16
new_device = 'cuda:1'
deep_to(tensors, new_device, new_dtype)
for tensor in tensors:
self.assertTrue(tensor.to.call_args[0][0] == new_device)
self.assertTrue(tensor.to.call_args[0][1] == new_dtype)
def test_deep_to_dict(self):
tensor_1 = torch.Tensor([0])
tensor_1.to = Mock()
tensor_2 = torch.Tensor([0])
tensor_2.to = Mock()
tensors = {'t1': tensor_1, 't2': tensor_2}
new_dtype = torch.float16
new_device = 'cuda:1'
deep_to(tensors, new_device, new_dtype)
self.assertTrue(tensor_1.to.call_args[0][0] == new_device)
self.assertTrue(tensor_1.to.call_args[0][1] == new_dtype)
self.assertTrue(tensor_2.to.call_args[0][0] == new_device)
self.assertTrue(tensor_2.to.call_args[0][1] == new_dtype)
def test_deep_to_unknown_object(self):
tensor_1 = MagicMock()
tensor_2 = MagicMock()
tensors = {'t1': tensor_1, 't2': tensor_2}
new_dtype = torch.float16
new_device = 'cuda:1'
deep_to(tensors, new_device, new_dtype)
self.assertTrue(tensor_1.to.call_args is None)
self.assertTrue(tensor_2.to.call_args is None)
def test_load_batch_standard(self):
items = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2]))]
iterator = iter(items)
state = {torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
load_batch_standard(state)
self.assertTrue(state[torchbearer.X].item() == items[0][0].item())
self.assertTrue(state[torchbearer.Y_TRUE].item() == items[0][1].item())
def test_load_batch_inf_standard_normal(self):
items = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
iterator = iter(items)
state = {torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
loader = load_batch_infinite(load_batch_standard)
for i in range(2):
loader(state)
self.assertTrue(state[torchbearer.X].item() == items[1][0].item())
self.assertTrue(state[torchbearer.Y_TRUE].item() == items[1][1].item())
def test_load_batch_inf_standard_too_many(self):
items = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2])), (torch.Tensor([3]), torch.Tensor([3]))]
iterator = iter(items)
state = {torchbearer.GENERATOR: items, torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
loader = load_batch_infinite(load_batch_standard)
for i in range(12):
loader(state)
self.assertTrue(state[torchbearer.X].item() == items[2][0].item())
self.assertTrue(state[torchbearer.Y_TRUE].item() == items[2][1].item())
def test_load_batch_none(self):
items = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2]))]
iterator = iter(items)
state = {torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
load_batch_none(state)
self.assertTrue(state[torchbearer.X] is None)
self.assertTrue(state[torchbearer.Y_TRUE] is None)
def test_load_batch_predict_data(self):
items = [torch.Tensor([1]), torch.Tensor([2])]
iterator = iter(items)
state = {torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
load_batch_predict(state)
self.assertTrue(state[torchbearer.X].item() == items[0].item())
def test_load_batch_predict_list(self):
items = [(torch.Tensor([1]), torch.Tensor([1])), (torch.Tensor([2]), torch.Tensor([2]))]
iterator = iter(items)
state = {torchbearer.ITERATOR: iterator, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.int}
load_batch_predict(state)
self.assertTrue(state[torchbearer.X].item() == items[0][0].item())
self.assertTrue(state[torchbearer.Y_TRUE].item() == items[0][1].item())
def test_update_device_and_dtype_only_kwarg(self):
main_state = {}
dtype = torch.float16
dev = 'cuda:1'
kwargs = {str(torchbearer.DEVICE): dev, str(torchbearer.DATA_TYPE): dtype}
main_state = update_device_and_dtype(main_state, **kwargs)
self.assertTrue(main_state[torchbearer.DATA_TYPE] == dtype)
self.assertTrue(main_state[torchbearer.DEVICE] == dev)
def test_update_device_and_dtype_only_arg(self):
main_state = {}
dtype = torch.float16
dev = 'cuda:1'
args = (dtype, dev)
main_state = update_device_and_dtype(main_state, *args)
self.assertTrue(main_state[torchbearer.DATA_TYPE] == dtype)
self.assertTrue(main_state[torchbearer.DEVICE] == dev)
def test_new_iter_none(self):
generator = None
t = Trial(None)
out = t._new_iter(generator)
self.assertTrue(out is None)
def test_new_iter_standard(self):
class EmptyObj(object):
def __init__(self):
super(self.__class__, self).__init__()
self.count = 0
def __iter__(self):
self.count += 1
return iter([1,2,3])
generator = EmptyObj()
t = Trial(None)
_ = t._new_iter(generator)
self.assertTrue(generator.count == 1)
self.assertTrue(not hasattr(generator, 'inf'))
def test_new_iter_inf(self):
class EmptyObj(object):
def __init__(self):
super(self.__class__, self).__init__()
self.count = 0
self.tb_iter = Mock()
self.inf = True
def __iter__(self):
self.count += 1
return iter([1,2,3])
generator = EmptyObj()
t = Trial(None)
out = t._new_iter(generator)
self.assertTrue(out == generator.tb_iter)
self.assertTrue(generator.count == 0)
| 42.961879
| 364
| 0.664543
| 13,849
| 126,222
| 5.832912
| 0.022673
| 0.037584
| 0.023397
| 0.041594
| 0.916415
| 0.896076
| 0.877049
| 0.860423
| 0.84256
| 0.817863
| 0
| 0.013279
| 0.218995
| 126,222
| 2,937
| 365
| 42.976507
| 0.806157
| 0.000998
| 0
| 0.745902
| 0
| 0.000431
| 0.026696
| 0.012594
| 0
| 0
| 0
| 0
| 0.139776
| 1
| 0.077653
| false
| 0.04918
| 0.004745
| 0.00302
| 0.100949
| 0.015099
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49628921775ee8074ab0fded16b671754b5b178a
| 20,111
|
py
|
Python
|
raiden_contracts/tests/test_channel_withdraw.py
|
agatsoh/raiden-contracts
|
7e26e693063633e9a1bd27981995bb252c5d1b51
|
[
"MIT"
] | null | null | null |
raiden_contracts/tests/test_channel_withdraw.py
|
agatsoh/raiden-contracts
|
7e26e693063633e9a1bd27981995bb252c5d1b51
|
[
"MIT"
] | 44
|
2021-07-29T07:14:22.000Z
|
2022-03-29T07:15:41.000Z
|
raiden_contracts/tests/test_channel_withdraw.py
|
agatsoh/raiden-contracts
|
7e26e693063633e9a1bd27981995bb252c5d1b51
|
[
"MIT"
] | null | null | null |
from typing import Callable
import pytest
from eth_tester.exceptions import TransactionFailed
from web3 import Web3
from web3.contract import Contract
from web3.exceptions import ValidationError
from raiden_contracts.constants import (
EMPTY_ADDRESS,
TEST_SETTLE_TIMEOUT_MIN,
ChannelEvent,
ChannelState,
)
from raiden_contracts.tests.utils import (
EMPTY_ADDITIONAL_HASH,
EMPTY_BALANCE_HASH,
EMPTY_SIGNATURE,
LOCKSROOT_OF_NO_LOCKS,
UINT256_MAX,
call_and_transact,
)
from raiden_contracts.tests.utils.blockchain import mine_blocks
from raiden_contracts.utils.events import check_withdraw
def test_withdraw_call(
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
create_withdraw_signatures: Callable,
) -> None:
"""setTotalWithdraw() fails with various wrong arguments"""
(A, B) = get_accounts(2)
withdraw_A = 3
channel_identifier = create_channel_and_deposit(A, B, 10, 1)
(signature_A_for_A, signature_B_for_A) = create_withdraw_signatures(
[A, B], channel_identifier, A, withdraw_A, UINT256_MAX
)
# Failure with zero (integer) instead of an address
with pytest.raises(ValidationError):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=0x0,
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
)
# Failure with the empty string instead of an address
with pytest.raises(ValidationError):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant="",
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
)
# Failure with a negative number as the total withdrawn amount
with pytest.raises(ValidationError):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=-1,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
)
# Failure with an overflown number as the total withdrawn amount
with pytest.raises(ValidationError):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=UINT256_MAX + 1,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
)
# Failure with the zero address insted of a participant's address
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=EMPTY_ADDRESS,
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
).call({"from": A})
# Failure with zero as the total withdrawn amount
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=0,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
).call({"from": A})
# Failure with the empty signature instead of A's
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=EMPTY_SIGNATURE,
partner_signature=signature_B_for_A,
).call({"from": A})
# Failure with the empty signature instead of B's
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=EMPTY_SIGNATURE,
).call({"from": A})
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=withdraw_A,
expiration_block=UINT256_MAX,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
),
{"from": A},
)
def test_withdraw_call_near_expiration(
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
create_withdraw_signatures: Callable,
web3: Web3,
) -> None:
"""setTotalWithdraw() succeeds when expiration_block is one block in the future"""
(A, B) = get_accounts(2)
withdraw_A = 3
channel_identifier = create_channel_and_deposit(A, B, 10, 1)
# The block must still be one block in the future when the transaction is
# processed, so we have to choose an expiration two block in the future
expiration = web3.eth.block_number + 2
(signature_A_for_A, signature_B_for_A) = create_withdraw_signatures(
[A, B], channel_identifier, A, withdraw_A, expiration
)
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier=channel_identifier,
participant=A,
total_withdraw=withdraw_A,
expiration_block=expiration,
participant_signature=signature_A_for_A,
partner_signature=signature_B_for_A,
),
{"from": A},
)
def test_withdraw_wrong_state(
web3: Web3,
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
withdraw_channel: Callable,
create_close_signature_for_no_balance_proof: Callable,
) -> None:
"""setTotalWithdraw() should fail on a closed or settled channel"""
(A, B) = get_accounts(2)
withdraw_A = 1
assert token_network.functions.getChannelIdentifier(A, B).call() == 0
channel_identifier = create_channel_and_deposit(A, B, 10, 14, TEST_SETTLE_TIMEOUT_MIN)
(_, state) = token_network.functions.getChannelInfo(channel_identifier, A, B).call()
assert state == ChannelState.OPENED
# Channel is open, withdraw must work
withdraw_channel(channel_identifier, A, withdraw_A, UINT256_MAX, B)
closing_sig = create_close_signature_for_no_balance_proof(A, channel_identifier)
call_and_transact(
token_network.functions.closeChannel(
channel_identifier=channel_identifier,
non_closing_participant=B,
closing_participant=A,
balance_hash=EMPTY_BALANCE_HASH,
nonce=0,
additional_hash=EMPTY_ADDITIONAL_HASH,
non_closing_signature=EMPTY_SIGNATURE,
closing_signature=closing_sig,
),
{"from": A},
)
(_, state) = token_network.functions.getChannelInfo(channel_identifier, A, B).call()
assert state == ChannelState.CLOSED
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, A, withdraw_A, UINT256_MAX, B)
mine_blocks(web3, TEST_SETTLE_TIMEOUT_MIN + 1)
call_and_transact(
token_network.functions.settleChannel(
channel_identifier,
A,
0,
0,
LOCKSROOT_OF_NO_LOCKS,
B,
0,
0,
LOCKSROOT_OF_NO_LOCKS,
),
{"from": A},
)
(_, state) = token_network.functions.getChannelInfo(channel_identifier, A, B).call()
assert state == ChannelState.REMOVED
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, A, withdraw_A, UINT256_MAX, B)
def test_withdraw_bigger(
create_channel_and_deposit: Callable, get_accounts: Callable, withdraw_channel: Callable
) -> None:
(A, B) = get_accounts(2)
deposit_A = 15
deposit_B = 13
channel_identifier = create_channel_and_deposit(A, B, deposit_A, deposit_B)
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, A, deposit_A + deposit_B + 1, UINT256_MAX, B)
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, B, deposit_A + deposit_B + 1, UINT256_MAX, A)
withdraw_channel(channel_identifier, A, 3, UINT256_MAX, B)
withdraw_channel(channel_identifier, B, 6, UINT256_MAX, A)
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, A, deposit_A + deposit_B - 5, UINT256_MAX, B)
with pytest.raises(TransactionFailed):
withdraw_channel(channel_identifier, B, deposit_A + deposit_B - 2, UINT256_MAX, A)
withdraw_channel(channel_identifier, A, deposit_A + deposit_B - 7, UINT256_MAX, B)
def test_withdraw_wrong_signers(
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
create_withdraw_signatures: Callable,
) -> None:
(A, B, C) = get_accounts(3)
deposit_A = 15
deposit_B = 13
withdraw_A = 5
channel_identifier = create_channel_and_deposit(A, B, deposit_A, deposit_B)
(
signature_A_for_A,
signature_B_for_A,
signature_C_for_A,
) = create_withdraw_signatures([A, B, C], channel_identifier, A, withdraw_A, UINT256_MAX)
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_C_for_A,
signature_B_for_A,
).call({"from": C})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_C_for_A,
).call({"from": C})
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A,
),
{"from": C},
)
def test_withdraw_wrong_signature_content(
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
create_withdraw_signatures: Callable,
web3: Web3,
) -> None:
(A, B, C) = get_accounts(3)
deposit_A = 15
deposit_B = 13
withdraw_A = 3
channel_identifier = create_channel_and_deposit(A, B, deposit_A, deposit_B)
channel_identifier_fake = token_network.functions.getChannelIdentifier(A, C).call()
(signature_A_for_A, signature_B_for_A) = create_withdraw_signatures(
[A, B], channel_identifier, A, withdraw_A, UINT256_MAX
)
(signature_A_for_A_fake1, signature_B_for_A_fake1) = create_withdraw_signatures(
[A, B], channel_identifier_fake, A, withdraw_A, UINT256_MAX
)
(signature_A_for_A_fake2, signature_B_for_A_fake2) = create_withdraw_signatures(
[A, B], channel_identifier, B, withdraw_A, UINT256_MAX
)
(signature_A_for_A_fake3, signature_B_for_A_fake3) = create_withdraw_signatures(
[A, B], channel_identifier, A, withdraw_A - 1, UINT256_MAX
)
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A_fake1,
signature_B_for_A,
).call({"from": A})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A_fake1,
).call({"from": A})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A_fake2,
signature_B_for_A,
).call({"from": A})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A_fake2,
).call({"from": A})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A_fake3,
signature_B_for_A,
).call({"from": A})
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A_fake3,
).call({"from": A})
with pytest.raises(TransactionFailed):
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
0,
signature_A_for_A,
signature_B_for_A,
),
{"from": A},
)
with pytest.raises(TransactionFailed):
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
web3.eth.block_number,
signature_A_for_A,
signature_B_for_A,
),
{"from": A},
)
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A,
),
{"from": A},
)
def test_withdraw_channel_state(
get_accounts: Callable,
token_network: Contract,
custom_token: Contract,
create_channel_and_deposit: Callable,
withdraw_channel: Callable,
withdraw_state_tests: Callable,
) -> None:
(A, B, C) = get_accounts(3)
deposit_A = 20
deposit_B = 10
withdraw_A = 15
withdraw_B = 2
channel_identifier = create_channel_and_deposit(A, B, deposit_A, deposit_B)
balance_A = custom_token.functions.balanceOf(A).call()
balance_B = custom_token.functions.balanceOf(B).call()
balance_contract = custom_token.functions.balanceOf(token_network.address).call()
(
_,
withdrawn_amount,
_,
_,
_,
_,
_,
) = token_network.functions.getChannelParticipantInfo(channel_identifier, A, B).call()
assert withdrawn_amount == 0
withdraw_channel(channel_identifier, A, withdraw_A, UINT256_MAX, B, C)
withdraw_state_tests(
channel_identifier,
A,
deposit_A,
withdraw_A,
0,
balance_A,
B,
deposit_B,
0,
balance_B,
balance_contract,
C,
)
balance_A = custom_token.functions.balanceOf(A).call()
balance_B = custom_token.functions.balanceOf(B).call()
balance_contract = custom_token.functions.balanceOf(token_network.address).call()
withdraw_channel(channel_identifier, B, withdraw_B, UINT256_MAX, A)
withdraw_state_tests(
channel_identifier,
B,
deposit_B,
withdraw_B,
0,
balance_B,
A,
deposit_A,
withdraw_A,
balance_A,
balance_contract,
)
balance_A = custom_token.functions.balanceOf(A).call()
balance_B = custom_token.functions.balanceOf(B).call()
balance_contract = custom_token.functions.balanceOf(token_network.address).call()
withdraw_channel(channel_identifier, B, withdraw_B + 3, UINT256_MAX, A)
withdraw_state_tests(
channel_identifier,
B,
deposit_B,
withdraw_B + 3,
withdraw_B,
balance_B,
A,
deposit_A,
withdraw_A,
balance_A,
balance_contract,
)
def test_withdraw_replay_reopened_channel(
web3: Web3,
token_network: Contract,
create_channel: Callable,
channel_deposit: Callable,
get_accounts: Callable,
create_withdraw_signatures: Callable,
create_close_signature_for_no_balance_proof: Callable,
) -> None:
(A, B) = get_accounts(2)
deposit_A = 20
withdraw_A = 5
channel_identifier1 = create_channel(A, B)[0]
channel_deposit(channel_identifier1, A, deposit_A, B)
(signature_A_for_A, signature_B_for_A) = create_withdraw_signatures(
[A, B], channel_identifier1, A, withdraw_A, UINT256_MAX
)
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier1,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A,
),
{"from": A},
)
closing_sig = create_close_signature_for_no_balance_proof(B, channel_identifier1)
call_and_transact(
token_network.functions.closeChannel(
channel_identifier=channel_identifier1,
non_closing_participant=A,
closing_participant=B,
balance_hash=EMPTY_BALANCE_HASH,
nonce=0,
additional_hash=EMPTY_ADDITIONAL_HASH,
non_closing_signature=EMPTY_SIGNATURE,
closing_signature=closing_sig,
),
{"from": B},
)
mine_blocks(web3, TEST_SETTLE_TIMEOUT_MIN + 1)
call_and_transact(
token_network.functions.settleChannel(
channel_identifier1,
A,
0,
0,
LOCKSROOT_OF_NO_LOCKS,
B,
0,
0,
LOCKSROOT_OF_NO_LOCKS,
),
{"from": A},
)
# Reopen the channel and make sure we cannot use the old withdraw proof
channel_identifier2 = create_channel(A, B)[0]
channel_deposit(channel_identifier2, A, deposit_A, B)
assert channel_identifier1 != channel_identifier2
with pytest.raises(TransactionFailed):
token_network.functions.setTotalWithdraw(
channel_identifier2,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A,
signature_B_for_A,
).call({"from": A})
# Signed message with correct channel_identifier must work
(signature_A_for_A2, signature_B_for_A2) = create_withdraw_signatures(
[A, B], channel_identifier2, A, withdraw_A, UINT256_MAX
)
call_and_transact(
token_network.functions.setTotalWithdraw(
channel_identifier2,
A,
withdraw_A,
UINT256_MAX,
signature_A_for_A2,
signature_B_for_A2,
),
{"from": A},
)
def test_withdraw_event(
token_network: Contract,
create_channel_and_deposit: Callable,
get_accounts: Callable,
withdraw_channel: Callable,
event_handler: Callable,
) -> None:
(A, B, C) = get_accounts(3)
ev_handler = event_handler(token_network)
channel_identifier = create_channel_and_deposit(A, B, 10, 1)
txn_hash = withdraw_channel(channel_identifier, A, 5, UINT256_MAX, B)
ev_handler.add(txn_hash, ChannelEvent.WITHDRAW, check_withdraw(channel_identifier, A, 5))
txn_hash = withdraw_channel(channel_identifier, B, 2, UINT256_MAX, A, C)
ev_handler.add(txn_hash, ChannelEvent.WITHDRAW, check_withdraw(channel_identifier, B, 2))
ev_handler.check()
| 31.922222
| 93
| 0.651285
| 2,237
| 20,111
| 5.47072
| 0.075548
| 0.108351
| 0.060059
| 0.034319
| 0.844174
| 0.813777
| 0.794166
| 0.772757
| 0.751348
| 0.721115
| 0
| 0.01888
| 0.273084
| 20,111
| 629
| 94
| 31.972973
| 0.81825
| 0.046343
| 0
| 0.744954
| 0
| 0
| 0.005222
| 0
| 0
| 0
| 0.000157
| 0
| 0.011009
| 1
| 0.016514
| false
| 0
| 0.018349
| 0
| 0.034862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8ce530ff11e8c4aac8cf994a6bf305f8ac8250e
| 572
|
py
|
Python
|
python/phonenumbers/data/alt_format_372.py
|
vishnuku/python-phonenumbers
|
6ac2cdd06b7ccf709a8efb21629cf2c5f030e627
|
[
"Apache-2.0"
] | 7
|
2019-05-20T09:57:02.000Z
|
2020-01-10T05:30:48.000Z
|
python/phonenumbers/data/alt_format_372.py
|
carljm/python-phonenumbers
|
494044aaf75443dbfd62b8d1352b441af6a458ae
|
[
"Apache-2.0"
] | 5
|
2020-03-24T16:37:25.000Z
|
2021-06-10T21:24:54.000Z
|
python/phonenumbers/data/alt_format_372.py
|
carljm/python-phonenumbers
|
494044aaf75443dbfd62b8d1352b441af6a458ae
|
[
"Apache-2.0"
] | 1
|
2020-09-08T14:45:34.000Z
|
2020-09-08T14:45:34.000Z
|
"""Auto-generated file, do not edit by hand. 372 metadata"""
from ..phonemetadata import NumberFormat
PHONE_ALT_FORMAT_372 = [NumberFormat(pattern='(\\d)(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['6']), NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['[69]|4[3-8]|5(?:[02]|1(?:[0-8]|95)|5[0-478]|6(?:4[0-4]|5[1-589]))|7[1-9]']), NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{2})', format='\\1 \\2 \\3', leading_digits_pattern=['[69]|4[3-8]|5(?:[02]|1(?:[0-8]|95)|5[0-478]|6(?:4[0-4]|5[1-589]))|7[1-9]'])]
| 114.4
| 468
| 0.56993
| 109
| 572
| 2.908257
| 0.330275
| 0.025237
| 0.189274
| 0.085174
| 0.615142
| 0.476341
| 0.476341
| 0.476341
| 0.476341
| 0.37224
| 0
| 0.155268
| 0.054196
| 572
| 4
| 469
| 143
| 0.430684
| 0.094406
| 0
| 0
| 1
| 1
| 0.482422
| 0.416016
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
b8dc9a3bcec960197eeae2b926f8922047260a74
| 160
|
py
|
Python
|
app/requests.py
|
mightyking7/Smartcar-app
|
17066b1f18c216555f9b58c92cc700097262aa5e
|
[
"MIT"
] | null | null | null |
app/requests.py
|
mightyking7/Smartcar-app
|
17066b1f18c216555f9b58c92cc700097262aa5e
|
[
"MIT"
] | null | null | null |
app/requests.py
|
mightyking7/Smartcar-app
|
17066b1f18c216555f9b58c92cc700097262aa5e
|
[
"MIT"
] | null | null | null |
import smartcar
# todo
# response = smartcar.get_vehicle_ids(access_token)
#
# vid = response['vehicles'][0]
#
# vehicle = smartcar.Vehicle(vid, access_token)
| 17.777778
| 51
| 0.7375
| 20
| 160
| 5.7
| 0.6
| 0.192982
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.125
| 160
| 9
| 52
| 17.777778
| 0.807143
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b8eb605acec316c6c3a52fa3f15a0ff1503c972b
| 10,768
|
py
|
Python
|
allauthdemo/demo/models.py
|
ranamahmud/sust-doc
|
30b24254a116a0399283ff8a2ef9a698ad07247f
|
[
"MIT"
] | null | null | null |
allauthdemo/demo/models.py
|
ranamahmud/sust-doc
|
30b24254a116a0399283ff8a2ef9a698ad07247f
|
[
"MIT"
] | 12
|
2020-02-11T23:45:18.000Z
|
2022-03-11T23:16:57.000Z
|
allauthdemo/demo/models.py
|
ranamahmud/sust-doc
|
30b24254a116a0399283ff8a2ef9a698ad07247f
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils import timezone
class LibraryFine(models.Model):
G_CHOICES = (('1','Male'),('2','Female'))
user = models.ForeignKey('allauthdemo_auth.DemoUser',related_name='library_fines')
date = models.DateTimeField(default=timezone.now)
gender = models.CharField(max_length=1,choices = G_CHOICES,default=1)
book_count = models.IntegerField(default=1)
amount_fined = models.IntegerField(default=0)
created_date = models.DateTimeField(
default=timezone.now)
published_date = models.DateTimeField(
blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
# def __str__(self):
# return self.title
class ShahparanHall(models.Model):
G_CHOICES = (('1','Male'),('2','Female'))
user = models.ForeignKey('allauthdemo_auth.DemoUser',related_name='shahparan_hall_fine')
date = models.DateTimeField(default=timezone.now)
gender = models.CharField(max_length=1,choices = G_CHOICES,default=1)
created_date = models.DateTimeField(
default=timezone.now)
published_date = models.DateTimeField(
blank=True, null=True)
father_name = models.CharField(max_length = 50,default="")
mother_name = models.CharField(max_length = 50,default="")
villagep = models.CharField(max_length = 50,default="")
postp = models.CharField(max_length = 50,default="")
thanap = models.CharField(max_length = 50,default="")
zillp = models.CharField(max_length = 50,default="")
villagec = models.CharField(max_length = 50,default="")
postc = models.CharField(max_length = 50,default="")
thanc = models.CharField(max_length = 50,default="")
zillc = models.CharField(max_length = 50,default="")
hons_first = models.CharField(max_length = 50,default="")
firstst_cgpa = models.IntegerField(default=0)
first_credit = models.IntegerField(default=0)
second_cgpa = models.IntegerField(default=0)
second_credit = models.IntegerField(default=0)
third_cgpa = models.IntegerField(default=0)
third_credit = models.IntegerField(default=0)
fourth_cgpa = models.IntegerField(default=0)
fourth_credit = models.IntegerField(default=0)
bank_money = models.DecimalField(max_digits = 10, decimal_places=2,blank=True, null=True)
bank_no = models.IntegerField(default=0)
def publish(self):
self.published_date = timezone.now()
self.save()
# def __str__(self):
# return self.title
class Transcript(models.Model):
    """Academic transcript record belonging to a DemoUser."""

    # Stored single-character codes for the gender choice field.
    G_CHOICES = (('1', 'Male'), ('2', 'Female'))

    # Explicit on_delete preserves the historical implicit CASCADE default and
    # is mandatory on Django >= 2.0.
    user = models.ForeignKey(
        'allauthdemo_auth.DemoUser',
        related_name='transcript',
        on_delete=models.CASCADE,
    )
    date = models.DateTimeField(default=timezone.now)
    # Fix: the default must be the stored choice value '1' (a string), not the
    # integer 1 -- this is a CharField with string-valued choices.
    gender = models.CharField(max_length=1, choices=G_CHOICES, default='1')
    created_date = models.DateTimeField(default=timezone.now)
    # Stays empty until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)
    honors_reg = models.IntegerField(default=0)
    ms_reg = models.IntegerField(default=0)
    discipline = models.CharField(max_length=100, default="")
    exam_name_date = models.CharField(max_length=100, default="")
    address = models.CharField(max_length=200, default="")

    def publish(self):
        """Stamp published_date with the current time and persist the record."""
        self.published_date = timezone.now()
        self.save()
class Gradesheet(models.Model):
    """Grade sheet record belonging to a DemoUser."""

    # Stored single-character codes for the gender choice field.
    G_CHOICES = (('1', 'Male'), ('2', 'Female'))

    # Explicit on_delete preserves the historical implicit CASCADE default and
    # is mandatory on Django >= 2.0.
    user = models.ForeignKey(
        'allauthdemo_auth.DemoUser',
        related_name='gradesheet',
        on_delete=models.CASCADE,
    )
    date = models.DateTimeField(default=timezone.now)
    # Fix: string default to match the string-valued G_CHOICES.
    gender = models.CharField(max_length=1, choices=G_CHOICES, default='1')
    created_date = models.DateTimeField(default=timezone.now)
    # Stays empty until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)
    honors_reg = models.IntegerField(default=0)
    discipline = models.CharField(max_length=100, default="")
    school = models.CharField(max_length=100, default="")
    exam_name_date = models.CharField(max_length=100, default="")
    # Fix: max_digits=2 with decimal_places=2 could only store values below
    # 1.00; a CGPA needs an integer digit as well (e.g. 3.75), so max_digits=3.
    cgpa = models.DecimalField(max_digits=3, decimal_places=2, default=0)
    # Letter grade, e.g. "A+".
    letter = models.CharField(max_length=2, default="")
    address = models.CharField(max_length=200, default="")
    # NOTE(review): field name is misspelled ("nationlaity"); kept as-is to
    # avoid a schema migration -- rename alongside a migration if desired.
    nationlaity = models.CharField(max_length=30, default="Bangladeshi")

    def publish(self):
        """Stamp published_date with the current time and persist the record."""
        self.published_date = timezone.now()
        self.save()
class CashMemo(models.Model):
    """Bank cash-memo record belonging to a DemoUser."""

    # NOTE(review): unused in this model (it has no gender field); kept for
    # backward compatibility with any external references.
    G_CHOICES = (('1', 'Male'), ('2', 'Female'))

    # Explicit on_delete preserves the historical implicit CASCADE default and
    # is mandatory on Django >= 2.0.
    user = models.ForeignKey(
        'allauthdemo_auth.DemoUser',
        related_name='cash_memo',
        on_delete=models.CASCADE,
    )
    date = models.DateTimeField(default=timezone.now)

    # Memo fields.
    account_no = models.IntegerField(default=0)
    branch = models.CharField(max_length=30, default="")
    name = models.CharField(max_length=30, default="")
    money1 = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    # NOTE(review): field name is misspelled ("moeny2"); kept as-is to avoid a
    # schema migration.
    moeny2 = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    money3 = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    total = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
    created_date = models.DateTimeField(default=timezone.now)
    # Stays empty until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)

    def publish(self):
        """Stamp published_date with the current time and persist the record."""
        self.published_date = timezone.now()
        self.save()
class S2(models.Model):
    """'S2' fee-collection form belonging to a DemoUser."""

    # Stored single-character codes for the gender choice field.
    G_CHOICES = (('1', 'Male'), ('2', 'Female'))

    # Explicit on_delete preserves the historical implicit CASCADE default and
    # is mandatory on Django >= 2.0.
    user = models.ForeignKey(
        'allauthdemo_auth.DemoUser',
        related_name='s_2',
        on_delete=models.CASCADE,
    )
    date = models.DateTimeField(default=timezone.now)
    # Fix: string default to match the string-valued G_CHOICES.
    gender = models.CharField(max_length=1, choices=G_CHOICES, default='1')

    # S2 fee amounts; all optional (blank/null).
    # NOTE(review): several field names are misspelled (tution_fee, hall_seet,
    # meical_fee); kept as-is to avoid schema migrations.
    admission_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    tution_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    union_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    reg_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    welfare_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    library_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    computer_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    rover_scout = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    bncc = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    travel = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    hall_seet = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    other = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    meical_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    id_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    book_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    festival_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    syllabus_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    diary_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    marksheet_fee = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    fine = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra1 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra2 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra3 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra4 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra5 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    # Grand total gets a wider range than the individual fees.
    total = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    created_date = models.DateTimeField(default=timezone.now)
    # Stays empty until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)

    def publish(self):
        """Stamp published_date with the current time and persist the record."""
        self.published_date = timezone.now()
        self.save()
class STD6(models.Model):
    """'STD-6' examination-fee form belonging to a DemoUser."""

    # Stored single-character codes for the gender choice field.
    G_CHOICES = (('1', 'Male'), ('2', 'Female'))

    # Explicit on_delete preserves the historical implicit CASCADE default and
    # is mandatory on Django >= 2.0.
    user = models.ForeignKey(
        'allauthdemo_auth.DemoUser',
        related_name='std_6',
        on_delete=models.CASCADE,
    )
    date = models.DateTimeField(default=timezone.now)
    # Fix: string default to match the string-valued G_CHOICES.
    gender = models.CharField(max_length=1, choices=G_CHOICES, default='1')

    # STD-6 fields: course counts and per-item fee amounts (all optional).
    # NOTE(review): several field names are misspelled (certiricate,
    # non_colligiate, course_modificatoin); kept as-is to avoid schema
    # migrations.
    total_theory = models.IntegerField(blank=True, null=True)
    total_lab = models.IntegerField(blank=True, null=True)
    exam_theory = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    exam_lab = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    drop_theory = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    drop_lab = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    certiricate = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    duplicate = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    registration_late = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    non_colligiate = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    course_modificatoin = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    other = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra1 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra2 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra3 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra4 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    extra5 = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
    # Grand total gets a wider range than the individual fees.
    total = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
    created_date = models.DateTimeField(default=timezone.now)
    # Stays empty until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)

    def publish(self):
        """Stamp published_date with the current time and persist the record."""
        self.published_date = timezone.now()
        self.save()
| 47.22807
| 101
| 0.709695
| 1,395
| 10,768
| 5.296057
| 0.106093
| 0.068219
| 0.098538
| 0.128858
| 0.902545
| 0.854494
| 0.785869
| 0.775853
| 0.763806
| 0.763806
| 0
| 0.023783
| 0.164376
| 10,768
| 228
| 102
| 47.22807
| 0.797288
| 0.034361
| 0
| 0.52907
| 0
| 0
| 0.032668
| 0.016864
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040698
| false
| 0
| 0.011628
| 0
| 0.837209
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
b8ecebe06612f4492edb736cb99f175643428c94
| 50,125
|
py
|
Python
|
tests/decompilation_tests.py
|
s3rvac/retdec-python
|
6360ecba75fae5420de2bf2ba63cf8a8c0e0c7cc
|
[
"MIT"
] | 102
|
2015-02-08T20:00:43.000Z
|
2022-02-06T20:34:24.000Z
|
tests/decompilation_tests.py
|
s3rvac/retdec-python
|
6360ecba75fae5420de2bf2ba63cf8a8c0e0c7cc
|
[
"MIT"
] | 5
|
2016-08-02T04:46:29.000Z
|
2018-10-11T06:08:32.000Z
|
tests/decompilation_tests.py
|
s3rvac/retdec-python
|
6360ecba75fae5420de2bf2ba63cf8a8c0e0c7cc
|
[
"MIT"
] | 24
|
2015-04-08T13:27:54.000Z
|
2020-12-07T21:22:23.000Z
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tests for the :mod:`retdec.decompilation` module."""
import functools
import unittest
from retdec.decompilation import Decompilation
from retdec.decompilation import DecompilationPhase
from retdec.exceptions import ArchiveGenerationFailedError
from retdec.exceptions import CFGGenerationFailedError
from retdec.exceptions import CGGenerationFailedError
from retdec.exceptions import DecompilationFailedError
from retdec.exceptions import NoSuchCFGError
from retdec.exceptions import OutputNotRequestedError
from tests import mock
from tests.resource_tests import ResourceTestsBase
from tests.resource_tests import WithDisabledWaitingInterval
from tests.resource_tests import WithMockedIO
class DecompilationPhaseTests(unittest.TestCase):
    """Tests of :class:`retdec.decompilation.DecompilationPhase`."""

    def _phase(self, **overrides):
        """Builds a phase from canonical attributes, overridden by kwargs."""
        attrs = {
            'name': 'NAME',
            'part': 'PART',
            'description': 'DESCRIPTION',
            'completion': 75,
            'warnings': ['some warning'],
        }
        attrs.update(overrides)
        return DecompilationPhase(**attrs)

    def test_arguments_passed_to_initializer_are_accessible(self):
        phase = self._phase()
        self.assertEqual(phase.name, 'NAME')
        self.assertEqual(phase.part, 'PART')
        self.assertEqual(phase.description, 'DESCRIPTION')
        self.assertEqual(phase.completion, 75)
        self.assertEqual(phase.warnings, ['some warning'])

    def test_two_phases_with_same_data_are_equal(self):
        self.assertEqual(self._phase(), self._phase())

    def test_two_phases_with_different_name_are_not_equal(self):
        self.assertNotEqual(self._phase(), self._phase(name='OTHER NAME'))

    def test_two_phases_with_different_part_are_not_equal(self):
        self.assertNotEqual(self._phase(), self._phase(part='OTHER PART'))

    def test_two_phases_with_different_description_are_not_equal(self):
        self.assertNotEqual(
            self._phase(),
            self._phase(description='OTHER DESCRIPTION')
        )

    def test_two_phases_with_different_completion_are_not_equal(self):
        self.assertNotEqual(self._phase(), self._phase(completion=100))

    def test_two_phases_with_different_warnings_are_not_equal(self):
        self.assertNotEqual(self._phase(warnings=[]), self._phase())

    def test_repr_returns_correct_value(self):
        self.assertEqual(
            repr(self._phase()),
            ("retdec.decompilation.DecompilationPhase(name='NAME', "
             "part='PART', description='DESCRIPTION', completion=75, "
             "warnings=['some warning'])")
        )
class DecompilationTestsBase(ResourceTestsBase):
    """Base class of all tests of :class:`retdec.decompilation.Decompilation`.
    """

    def status_with(self, status):
        """Adds missing keys to the given status and returns it."""
        status = super().status_with(status)
        # Fill in defaults only where the caller did not provide a value.
        status.setdefault('completion', 0)
        status.setdefault('phases', [])
        return status

    def get_decompilation_without_any_cfg(self):
        """Returns a decompilation without any control-flow graphs."""
        self.conn.send_get_request.return_value = self.status_with({'cfgs': {}})
        return Decompilation('ID', self.conn)
class DecompilationTests(DecompilationTestsBase):
"""Tests for :class:`retdec.decompilation.Decompilation`."""
def get_decompilation_that_did_not_request_cg_to_be_generated(self):
# This is signalized by a response that does not include the 'cg' key.
self.conn.send_get_request.return_value = self.status_with({})
return Decompilation('ID', self.conn)
def get_decompilation_that_did_not_request_cfgs_to_be_generated(self):
# This is signalized by a response that does not include the 'cfgs'
# key.
self.conn.send_get_request.return_value = self.status_with({})
return Decompilation('ID', self.conn)
def get_decompilation_that_did_not_request_archive_to_be_generated(self):
# This is signalized by a response that does not include the 'archive'
# key.
self.conn.send_get_request.return_value = self.status_with({})
return Decompilation('ID', self.conn)
def test_get_completion_checks_status_on_first_call_and_returns_correct_value(self):
self.conn.send_get_request.return_value = self.status_with({
'completion': 20
})
d = Decompilation('ID', self.conn)
completion = d.get_completion()
self.assertEqual(completion, 20)
self.assert_get_request_was_sent_with('/ID/status')
def test_get_phases_checks_status_on_first_call_and_returns_correct_value(self):
self.conn.send_get_request.return_value = self.status_with({
'phases': [
{
'name': 'name1',
'part': 'part1',
'description': 'description1',
'completion': 1,
'warnings': []
},
{
'name': 'name2',
'part': 'part2',
'description': 'description2',
'completion': 2,
'warnings': []
}
]
})
d = Decompilation('ID', self.conn)
phases = d.get_phases()
self.assertEqual(len(phases), 2)
self.assertEqual(phases[0].name, 'name1')
self.assertEqual(phases[1].name, 'name2')
self.assert_get_request_was_sent_with('/ID/status')
def test_get_phases_ignores_unknown_phase_attributes(self):
self.conn.send_get_request.return_value = self.status_with({
'phases': [
{
'name': 'name',
'part': 'part',
'description': 'description',
'completion': 0,
'warnings': [],
'unknown_attr': None # This attribute is to be ignored.
}
]
})
d = Decompilation('ID', self.conn)
phases = d.get_phases()
self.assertEqual(len(phases), 1)
self.assertFalse(hasattr(phases[0], 'unknown_attr'))
def test_cg_generation_has_finished_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
d.cg_generation_has_finished()
self.assert_get_request_was_sent_with('/ID/status')
def test_cg_generation_has_finished_returns_true_when_generated(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cg_generation_has_finished())
def test_cg_generation_has_finished_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cg_generation_has_finished())
def test_cg_generation_has_finished_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cg_generation_has_finished())
def test_cg_generation_has_finished_raises_exception_when_cg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cg_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cg_generation_has_finished()
def test_cg_generation_has_succeeded_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
d.cg_generation_has_succeeded()
self.assert_get_request_was_sent_with('/ID/status')
def test_cg_generation_has_succeeded_returns_true_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cg_generation_has_succeeded())
def test_cg_generation_has_succeeded_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cg_generation_has_succeeded())
def test_cg_generation_has_succeeded_returns_false_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cg_generation_has_succeeded())
def test_cg_generation_has_succeeded_raises_exception_when_cg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cg_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cg_generation_has_succeeded()
def test_cg_generation_has_failed_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
d.cg_generation_has_failed()
self.assert_get_request_was_sent_with('/ID/status')
def test_cg_generation_has_failed_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cg_generation_has_failed())
def test_cg_generation_has_failed_returns_false_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cg_generation_has_failed())
def test_cg_generation_has_failed_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cg_generation_has_failed())
def test_cg_generation_has_failed_raises_exception_when_cg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cg_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cg_generation_has_failed()
def test_get_cg_error_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
d.get_cg_generation_error()
self.assert_get_request_was_sent_with('/ID/status')
def test_get_cg_generation_error_returns_none_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertIsNone(d.get_cg_generation_error())
def test_get_cg_generation_error_returns_correct_error_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cg': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertEqual(d.get_cg_generation_error(), 'error message')
def test_get_cg_generation_error_raises_exception_when_cg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cg_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.get_cg_generation_error()
def test_funcs_with_cfg_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
d.funcs_with_cfg
self.assert_get_request_was_sent_with('/ID/status')
def test_funcs_with_cfg_returns_correct_value_when_cfgs_generation_requested(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func1': {
'generated': True,
'failed': False,
'error': None
},
'my_func2': {
'generated': False,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertEqual(d.funcs_with_cfg, ['my_func1', 'my_func2'])
def test_funcs_with_cfg_raises_exception_when_cfg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cfgs_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.funcs_with_cfg
def test_cfg_generation_has_finished_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
d.cfg_generation_has_finished('my_func')
self.assert_get_request_was_sent_with('/ID/status')
def test_cfg_generation_has_finished_returns_true_when_generated(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cfg_generation_has_finished('my_func'))
def test_cfg_generation_has_finished_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cfg_generation_has_finished('my_func'))
def test_cfg_generation_has_finished_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cfg_generation_has_finished('my_func'))
def test_cfg_generation_has_finished_raises_exception_when_cfg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cfgs_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cfg_generation_has_finished('my_func')
def test_cfg_generation_has_finished_raises_exception_when_no_such_cfg(self):
d = self.get_decompilation_without_any_cfg()
with self.assertRaises(NoSuchCFGError):
d.cfg_generation_has_finished('my_func')
def test_cfg_generation_has_succeeded_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
d.cfg_generation_has_succeeded('my_func')
self.assert_get_request_was_sent_with('/ID/status')
def test_cfg_generation_has_succeeded_returns_true_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cfg_generation_has_succeeded('my_func'))
def test_cfg_generation_has_succeeded_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cfg_generation_has_succeeded('my_func'))
def test_cfg_generation_has_succeeded_returns_false_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cfg_generation_has_succeeded('my_func'))
def test_cfg_generation_has_succeeded_raises_exception_when_cfg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cfgs_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cfg_generation_has_succeeded('my_func')
def test_cfg_generation_has_succeeed_raises_exception_when_no_such_cfg(self):
d = self.get_decompilation_without_any_cfg()
with self.assertRaises(NoSuchCFGError):
d.cfg_generation_has_succeeded('my_func')
def test_cfg_generation_has_failed_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
d.cfg_generation_has_failed('my_func')
self.assert_get_request_was_sent_with('/ID/status')
def test_cfg_generation_has_failed_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.cfg_generation_has_failed('my_func'))
def test_cfg_generation_has_failed_returns_false_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cfg_generation_has_failed('my_func'))
def test_cfg_generation_has_failed_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.cfg_generation_has_failed('my_func'))
def test_cfg_generation_has_failed_raises_exception_when_cfg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cfgs_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.cfg_generation_has_failed('my_func')
def test_cfg_generation_has_failed_raises_exception_when_no_such_cfg(self):
d = self.get_decompilation_without_any_cfg()
with self.assertRaises(NoSuchCFGError):
d.cfg_generation_has_failed('my_func')
def test_get_cfg_generation_error_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
d.get_cfg_generation_error('my_func')
self.assert_get_request_was_sent_with('/ID/status')
def test_get_cfg_generation_error_returns_none_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': True,
'failed': False,
'error': None
}
}
})
d = Decompilation('ID', self.conn)
self.assertIsNone(d.get_cfg_generation_error('my_func'))
def test_get_cfg_generation_error_returns_correct_error_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'cfgs': {
'my_func': {
'generated': False,
'failed': True,
'error': 'error message'
}
}
})
d = Decompilation('ID', self.conn)
self.assertEqual(d.get_cfg_generation_error('my_func'), 'error message')
def test_get_cfg_generation_error_raises_exception_when_cfg_not_requested(self):
d = self.get_decompilation_that_did_not_request_cfgs_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.get_cfg_generation_error('my_func')
def test_get_cfg_generation_error_raises_exception_when_no_such_cfg(self):
d = self.get_decompilation_without_any_cfg()
with self.assertRaises(NoSuchCFGError):
d.cfg_generation_has_failed('my_func')
def test_archive_generation_has_finished_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
d.archive_generation_has_finished()
self.assert_get_request_was_sent_with('/ID/status')
def test_archive_generation_has_finished_returns_true_when_generated(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.archive_generation_has_finished())
def test_archive_generation_has_finished_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.archive_generation_has_finished())
def test_archive_generation_has_finished_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.archive_generation_has_finished())
def test_archive_generation_has_finished_raises_exception_when_archive_not_requested(self):
d = self.get_decompilation_that_did_not_request_archive_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.archive_generation_has_finished()
def test_archive_generation_has_succeeded_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
d.archive_generation_has_succeeded()
self.assert_get_request_was_sent_with('/ID/status')
def test_archive_generation_has_succeeded_returns_true_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.archive_generation_has_succeeded())
def test_archive_generation_has_succeeded_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.archive_generation_has_succeeded())
def test_archive_generation_has_succeeded_returns_false_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.archive_generation_has_succeeded())
def test_archive_generation_has_succeeded_raises_exception_when_archive_not_requested(self):
d = self.get_decompilation_that_did_not_request_archive_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.archive_generation_has_succeeded()
def test_archive_generation_has_failed_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
d.archive_generation_has_failed()
self.assert_get_request_was_sent_with('/ID/status')
def test_archive_generation_has_failed_returns_true_when_failed(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
self.assertTrue(d.archive_generation_has_failed())
def test_archive_generation_has_failed_returns_false_when_succeeded(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': True,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.archive_generation_has_failed())
def test_archive_generation_has_failed_returns_false_when_not_finished(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': False,
'error': None
}
})
d = Decompilation('ID', self.conn)
self.assertFalse(d.archive_generation_has_failed())
def test_archive_generation_has_failed_raises_exception_when_archive_not_requested(self):
d = self.get_decompilation_that_did_not_request_archive_to_be_generated()
with self.assertRaises(OutputNotRequestedError):
d.archive_generation_has_failed()
def test_get_archive_error_checks_status_on_first_call(self):
self.conn.send_get_request.return_value = self.status_with({
'archive': {
'generated': False,
'failed': True,
'error': 'error message'
}
})
d = Decompilation('ID', self.conn)
d.get_archive_generation_error()
self.assert_get_request_was_sent_with('/ID/status')
def test_get_archive_generation_error_returns_none_when_succeeded(self):
    """No error is reported for a successfully generated archive."""
    archive = {'generated': True, 'failed': False, 'error': None}
    self.conn.send_get_request.return_value = self.status_with({'archive': archive})
    decomp = Decompilation('ID', self.conn)
    self.assertIsNone(decomp.get_archive_generation_error())
def test_get_archive_generation_error_returns_correct_error_when_failed(self):
    """The error message from the status is passed through verbatim."""
    archive = {'generated': False, 'failed': True, 'error': 'error message'}
    self.conn.send_get_request.return_value = self.status_with({'archive': archive})
    decomp = Decompilation('ID', self.conn)
    self.assertEqual(decomp.get_archive_generation_error(), 'error message')
def test_get_archive_generation_error_raises_exception_when_archive_not_requested(self):
    """Querying the error of a never-requested archive raises OutputNotRequestedError."""
    decomp = self.get_decompilation_that_did_not_request_archive_to_be_generated()
    with self.assertRaises(OutputNotRequestedError):
        decomp.get_archive_generation_error()
def test_repr_returns_correct_value(self):
    """repr() embeds the full class path and the resource ID."""
    decomp = Decompilation('ID', self.conn)
    expected = "<retdec.decompilation.Decompilation id='ID'>"
    self.assertEqual(repr(decomp), expected)
# WithDisabledWaitingInterval has to be put as the first base class, see its
# description for the reason why.
class DecompilationWaitUntilFinishedTests(WithDisabledWaitingInterval,
                                          DecompilationTestsBase):
    """Tests for :func:`retdec.resource.Decompilation.wait_until_finished()`.
    """

    def _succeeded_status(self):
        # Status of a decompilation that has successfully finished.
        return self.status_with({
            'completion': 100,
            'finished': True,
            'succeeded': True
        })

    def _running_status(self, completion):
        # Status of a decompilation that is still in progress.
        return self.status_with({
            'completion': completion,
            'finished': False,
            'succeeded': False
        })

    def _failed_status(self):
        # Status of a decompilation that finished with an error.
        return self.status_with({
            'finished': True,
            'failed': True,
            'error': 'error message'
        })

    def test_returns_when_resource_is_finished(self):
        self.conn.send_get_request.return_value = self._succeeded_status()
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_finished()
        self.assert_get_request_was_sent_with('/ID/status')

    def test_calls_callback_when_resource_finishes(self):
        self.conn.send_get_request.return_value = self._succeeded_status()
        decomp = Decompilation('ID', self.conn)
        on_change = mock.Mock()
        decomp.wait_until_finished(on_change)
        on_change.assert_called_once_with(decomp)

    def test_calls_callback_when_resource_status_changes(self):
        self.conn.send_get_request.side_effect = [
            self._running_status(0),
            self._running_status(15),
            self._succeeded_status()
        ]
        decomp = Decompilation('ID', self.conn)
        on_change = mock.Mock()
        decomp.wait_until_finished(on_change)
        # Two status changes (0 -> 15, 15 -> finished) => two callbacks.
        self.assertEqual(len(on_change.mock_calls), 2)

    def test_raises_exception_by_default_when_resource_failed(self):
        self.conn.send_get_request.return_value = self._failed_status()
        decomp = Decompilation('ID', self.conn)
        with self.assertRaises(DecompilationFailedError) as cm:
            decomp.wait_until_finished()
        self.assertRegex(str(cm.exception), r'.*error message.*')

    def test_calls_on_failure_when_it_is_callable(self):
        self.conn.send_get_request.return_value = self._failed_status()
        decomp = Decompilation('ID', self.conn)
        on_failure = mock.Mock()
        decomp.wait_until_finished(on_failure=on_failure)
        on_failure.assert_called_once_with('error message')

    def test_does_not_raise_exception_when_on_failure_is_none(self):
        self.conn.send_get_request.return_value = self._failed_status()
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_finished(on_failure=None)
# WithDisabledWaitingInterval has to be put as the first base class, see its
# description for the reason why.
class DecompilationWaitUntilCGIsGeneratedTests(WithDisabledWaitingInterval,
                                               DecompilationTestsBase):
    """Tests for
    :class:`retdec.decompilation.Decompilation.wait_until_cg_is_generated()`.
    """

    def _cg_status(self, generated, failed, error):
        # Build a status dict describing call-graph generation progress.
        return self.status_with({
            'cg': {
                'generated': generated,
                'failed': failed,
                'error': error
            }
        })

    def test_returns_when_cg_is_generated(self):
        self.conn.send_get_request.return_value = self._cg_status(
            generated=True, failed=False, error=None)
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_cg_is_generated()
        self.assert_get_request_was_sent_with('/ID/status')

    def test_waits_until_cg_is_generated(self):
        self.conn.send_get_request.side_effect = [
            self._cg_status(generated=False, failed=False, error=None),
            self._cg_status(generated=True, failed=False, error=None)
        ]
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_cg_is_generated()
        # One poll while pending plus one that sees the finished state.
        self.assertEqual(len(self.conn.send_get_request.mock_calls), 2)

    def test_raises_exception_by_default_when_generation_failed(self):
        self.conn.send_get_request.return_value = self._cg_status(
            generated=False, failed=True, error='error message')
        decomp = Decompilation('ID', self.conn)
        with self.assertRaises(CGGenerationFailedError) as cm:
            decomp.wait_until_cg_is_generated()
        self.assertRegex(str(cm.exception), r'.*error message.*')

    def test_calls_on_failure_when_it_is_callable(self):
        self.conn.send_get_request.return_value = self._cg_status(
            generated=False, failed=True, error='error message')
        decomp = Decompilation('ID', self.conn)
        on_failure = mock.Mock()
        decomp.wait_until_cg_is_generated(on_failure=on_failure)
        on_failure.assert_called_once_with('error message')

    def test_does_not_raise_exception_when_on_failure_is_none(self):
        self.conn.send_get_request.return_value = self._cg_status(
            generated=False, failed=True, error=None)
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_cg_is_generated(on_failure=None)
# WithDisabledWaitingInterval has to be put as the first base class, see its
# description for the reason why.
class DecompilationWaitUntilCFGIsGeneratedTests(WithDisabledWaitingInterval,
                                                DecompilationTestsBase):
    """Tests for
    :class:`retdec.decompilation.Decompilation.wait_until_cfg_is_generated()`.
    """

    def _cfg_status(self, generated, failed, error):
        # Build a status dict describing the CFG generation state of the
        # function 'my_func' used throughout this class.
        return self.status_with({
            'cfgs': {
                'my_func': {
                    'generated': generated,
                    'failed': failed,
                    'error': error
                }
            }
        })

    def test_returns_when_cfg_is_generated(self):
        self.conn.send_get_request.return_value = self._cfg_status(
            generated=True, failed=False, error=None)
        d = Decompilation('ID', self.conn)
        d.wait_until_cfg_is_generated('my_func')
        self.assert_get_request_was_sent_with('/ID/status')

    def test_waits_until_cfg_is_generated(self):
        self.conn.send_get_request.side_effect = [
            self._cfg_status(generated=False, failed=False, error=None),
            self._cfg_status(generated=True, failed=False, error=None)
        ]
        d = Decompilation('ID', self.conn)
        d.wait_until_cfg_is_generated('my_func')
        # One poll while pending plus one that sees the finished state.
        self.assertEqual(len(self.conn.send_get_request.mock_calls), 2)

    def test_raises_exception_by_default_when_generation_failed(self):
        self.conn.send_get_request.return_value = self._cfg_status(
            generated=False, failed=True, error='error message')
        d = Decompilation('ID', self.conn)
        with self.assertRaises(CFGGenerationFailedError) as cm:
            d.wait_until_cfg_is_generated('my_func')
        # assertRegex instead of assertIn for consistency with the analogous
        # CG/archive generation-failure tests.
        self.assertRegex(str(cm.exception), r'.*error message.*')

    def test_calls_on_failure_when_it_is_callable(self):
        self.conn.send_get_request.return_value = self._cfg_status(
            generated=False, failed=True, error='error message')
        d = Decompilation('ID', self.conn)
        on_failure = mock.Mock()
        d.wait_until_cfg_is_generated('my_func', on_failure=on_failure)
        on_failure.assert_called_once_with('error message')

    def test_does_not_raise_exception_when_on_failure_is_none(self):
        self.conn.send_get_request.return_value = self._cfg_status(
            generated=False, failed=True, error=None)
        d = Decompilation('ID', self.conn)
        d.wait_until_cfg_is_generated('my_func', on_failure=None)

    def test_raises_exception_when_no_such_cfg(self):
        d = self.get_decompilation_without_any_cfg()
        with self.assertRaises(NoSuchCFGError):
            d.wait_until_cfg_is_generated('my_func')
# WithDisabledWaitingInterval has to be put as the first base class, see its
# description for the reason why.
class DecompilationWaitUntilArchiveIsGeneratedTests(WithDisabledWaitingInterval,
                                                    DecompilationTestsBase):
    """Tests for
    :class:`retdec.decompilation.Decompilation.wait_until_archive_is_generated()`.
    """

    def _archive_status(self, generated, failed, error):
        # Build a status dict describing archive generation progress.
        return self.status_with({
            'archive': {
                'generated': generated,
                'failed': failed,
                'error': error
            }
        })

    def test_returns_when_archive_is_generated(self):
        self.conn.send_get_request.return_value = self._archive_status(
            generated=True, failed=False, error=None)
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_archive_is_generated()
        self.assert_get_request_was_sent_with('/ID/status')

    def test_waits_until_archive_is_generated(self):
        self.conn.send_get_request.side_effect = [
            self._archive_status(generated=False, failed=False, error=None),
            self._archive_status(generated=True, failed=False, error=None)
        ]
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_archive_is_generated()
        # One poll while pending plus one that sees the finished state.
        self.assertEqual(len(self.conn.send_get_request.mock_calls), 2)

    def test_raises_exception_by_default_when_generation_failed(self):
        self.conn.send_get_request.return_value = self._archive_status(
            generated=False, failed=True, error='error message')
        decomp = Decompilation('ID', self.conn)
        with self.assertRaises(ArchiveGenerationFailedError) as cm:
            decomp.wait_until_archive_is_generated()
        self.assertRegex(str(cm.exception), r'.*error message.*')

    def test_calls_on_failure_when_it_is_callable(self):
        self.conn.send_get_request.return_value = self._archive_status(
            generated=False, failed=True, error='error message')
        decomp = Decompilation('ID', self.conn)
        on_failure = mock.Mock()
        decomp.wait_until_archive_is_generated(on_failure=on_failure)
        on_failure.assert_called_once_with('error message')

    def test_does_not_raise_exception_when_on_failure_is_none(self):
        self.conn.send_get_request.return_value = self._archive_status(
            generated=False, failed=True, error=None)
        decomp = Decompilation('ID', self.conn)
        decomp.wait_until_archive_is_generated(on_failure=None)
# WithMockedIO has to be put as the first base class, see its description for
# the reason why.
class DecompilationGetOutputsTests(WithMockedIO, DecompilationTestsBase):
    """Tests for methods that obtain outputs from a
    :class:`retdec.decompilation.Decompilation`.
    """

    def _decompilation(self):
        # Every test in this class works with the same dummy decompilation.
        return Decompilation('ID', self.conn)

    def test_get_hll_code_obtains_file_contents(self):
        self.assert_obtains_file_contents(
            self._decompilation().get_hll_code,
            '/ID/outputs/hll',
            is_text_file=True
        )

    def test_save_hll_code_stores_file_to_cwd_when_directory_is_not_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_hll_code,
            '/ID/outputs/hll',
            directory=None
        )

    def test_save_hll_code_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_hll_code,
            '/ID/outputs/hll',
            directory='dir'
        )

    def test_get_dsm_code_obtains_file_contents(self):
        self.assert_obtains_file_contents(
            self._decompilation().get_dsm_code,
            '/ID/outputs/dsm',
            is_text_file=True
        )

    def test_save_dsm_code_stores_file_to_cwd_when_directory_is_not_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_dsm_code,
            '/ID/outputs/dsm',
            directory=None
        )

    def test_save_dsm_code_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_dsm_code,
            '/ID/outputs/dsm',
            directory='dir'
        )

    def test_save_cg_stores_file_to_cwd_when_directory_is_not_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_cg,
            '/ID/outputs/cg',
            directory=None
        )

    def test_save_cg_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_cg,
            '/ID/outputs/cg',
            directory='dir'
        )

    def test_save_cfg_stores_file_to_cwd_when_directory_is_not_given(self):
        # save_cfg takes the function name, so bind it before handing the
        # callable to the assertion helper.
        self.assert_obtains_and_saves_file(
            functools.partial(self._decompilation().save_cfg, 'my_func'),
            '/ID/outputs/cfgs/my_func',
            directory=None
        )

    def test_save_cfg_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            functools.partial(self._decompilation().save_cfg, 'my_func'),
            '/ID/outputs/cfgs/my_func',
            directory='dir'
        )

    def test_save_archive_stores_file_to_cwd_when_directory_is_not_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_archive,
            '/ID/outputs/archive',
            directory=None
        )

    def test_save_archive_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_archive,
            '/ID/outputs/archive',
            directory='dir'
        )

    def test_save_binary_stores_file_to_cwd_when_directory_is_not_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_binary,
            '/ID/outputs/binary',
            directory=None
        )

    def test_save_binary_stores_file_to_directory_when_given(self):
        self.assert_obtains_and_saves_file(
            self._decompilation().save_binary,
            '/ID/outputs/binary',
            directory='dir'
        )
| 33.573342
| 96
| 0.584758
| 5,235
| 50,125
| 5.222541
| 0.042598
| 0.049159
| 0.062546
| 0.075713
| 0.907535
| 0.88921
| 0.879846
| 0.854901
| 0.845757
| 0.84049
| 0
| 0.00301
| 0.317426
| 50,125
| 1,492
| 97
| 33.595845
| 0.796078
| 0.033756
| 0
| 0.715728
| 0
| 0
| 0.08785
| 0.003332
| 0
| 0
| 0
| 0
| 0.101766
| 1
| 0.098402
| false
| 0.000841
| 0.011775
| 0
| 0.12111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7701940aba0fd8da40efc330d31d1d3175dedc70
| 7,839
|
py
|
Python
|
week4/task1_1.py
|
mcv-m6-video/mcv-m6-2018-team6
|
8d374468713683b10e01d81c06cf2dcc1aa2e229
|
[
"MIT"
] | null | null | null |
week4/task1_1.py
|
mcv-m6-video/mcv-m6-2018-team6
|
8d374468713683b10e01d81c06cf2dcc1aa2e229
|
[
"MIT"
] | null | null | null |
week4/task1_1.py
|
mcv-m6-video/mcv-m6-2018-team6
|
8d374468713683b10e01d81c06cf2dcc1aa2e229
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 16 20:43:17 2018
@author: ferran
"""
from __future__ import division
import sys
import datetime
import imageio
import math
import task1
import matplotlib.pyplot as plt
import numpy as np
import time
from skimage import morphology
import cv2
import os
from sklearn.metrics import auc
from sklearn.metrics import precision_recall_fscore_support as PRFmetrics
from sklearn.metrics import precision_recall_curve
from scipy.ndimage import binary_fill_holes, generate_binary_structure
import cv2
import sys
import functions
import os
import math
import task1
import matplotlib.pyplot as plt
import numpy as np
#from frameworkJFMV2 import MOG as g
#from frameworkJFMV2 import Original as o
def image_translate(curr_img, future_img, block_size=25, area_size=20,
                    sample_point=(210, 80)):
    """Shift ``future_img`` back onto ``curr_img`` using block matching.

    Estimates the optical flow between the two frames and translates
    ``future_img`` by the negated motion vector found at ``sample_point``,
    so the returned image is motion-compensated with respect to
    ``curr_img``.

    :param curr_img: current (reference) frame
    :param future_img: next frame to be compensated
    :param block_size: side of the matching block in pixels (was a
        hard-coded 25)
    :param area_size: size of the search area in pixels (was a
        hard-coded 20)
    :param sample_point: (row, col) whose motion vector is taken as the
        global translation -- presumably a static background pixel;
        TODO confirm
    :return: ``future_img`` warped by the estimated translation
    """
    # NOTE(review): compute_block_matching is neither defined nor imported
    # in this module; the commented-out calls in the original code suggest
    # it comes from task1 -- confirm before running.
    flow = compute_block_matching(curr_img, future_img, block_size, area_size)
    row, col = sample_point
    u0 = flow[row, col, 0]
    u1 = flow[row, col, 1]
    # Parenthesized print works identically on Python 2 and 3 for a single
    # argument (the original used Python-2-only 'print U0' statements).
    print(u0)
    print(u1)
    print('space')
    num_rows, num_cols = future_img.shape[:2]
    # Negate the motion vector to move the future frame back onto the
    # current one.
    translation_matrix = np.float32([[1, 0, -u0], [0, 1, -u1]])
    return cv2.warpAffine(future_img, translation_matrix, (num_cols, num_rows))
# ---- Experiment: frame 000045 of the training set -------------------------
# Grid search over block size and search-area size for block-matching
# optical flow; MSEN and PEPN are recorded against the ground-truth flow.
curr_dir = '/home/ferran/Desktop/M6_project/week4/training/image_0/000045_10.png'
future_dir = '/home/ferran/Desktop/M6_project/week4/training/image_0/000045_11.png'
gt_dir= '/home/ferran/Desktop/M6_project/week4/training/flow_noc/000045_10.png'
future_img = cv2.imread(future_dir,0)  # flag 0: read as grayscale
curr_img = cv2.imread(curr_dir,0)
block_size=np.arange(10,51,10)  # candidate block sizes: 10,20,30,40,50
area_size=np.arange(1,26,5)  # candidate search areas: 1,6,11,16,21
OFgt = cv2.imread(gt_dir,-1)  # flag -1: load unchanged (ground-truth flow encoding)
x=block_size.shape[0]
y=area_size.shape[0]
matrix_m=np.zeros([x,y])  # MSEN for every (block size, area size) pair
matrix_p=np.zeros([x,y])  # PEPN for every (block size, area size) pair
c=0
for b in block_size:
    v=0
    for a in area_size:
        #matrix=np.array[]
        # NOTE(review): calcOpticalFlowBM is neither defined nor imported in
        # this file -- presumably a legacy OpenCV block-matching routine or a
        # local helper; as written this line raises NameError. Confirm the
        # intended callable before running.
        OFtest=calcOpticalFlowBM (curr_img, future_img, b, a)
        msen,pepn=task1.MSEN_PEPN(OFtest,OFgt)
        matrix_m[c,v]=msen
        matrix_p[c,v]=pepn
        v=v+1
    c=c+1
# PEPN curves: one line per block size, search-area size on the x axis.
#plt.savefig('PR_gray_highway1.png') #40 30 50 60
plt.plot(area_size,matrix_p[0],label='Block size=10')
plt.plot(area_size,matrix_p[1],label='Block size=20')
plt.plot(area_size,matrix_p[2],label='Block size=30')
plt.plot(area_size,matrix_p[3],label='Block size=40')
plt.plot(area_size,matrix_p[4],label='Block size=50')
#plt.plot(area_size,matrix_p[5],label='Block size=50')
#plt.plot(area_size,matrix_p[6],label='Block size=60')
#plt.plot(area_size,matrix_p[7],label='Block size=70')
#plt.plot(area_size,matrix_p[8],label='Block size=40')
#plt.plot(area_size,matrix_p[9],label='Block size=45')
#plt.plot(area_size,matrix_p[10],label='Block size=50')
#plt.plot(area_size,matrix_p[11],label='Block size=55')
#plt.plot(area_size,matrix_p[12],label='Block size=25')
#plt.plot(area_size,matrix_m[6],label='Block size=30')
#plt.plot(recall_mop,precision_mop,label='With Moprhological filters | AUC=0,62')
plt.xlabel('Area of search')
plt.ylabel('PEPN')
plt.title('Frame 45')
plt.legend()
#plt.savefig('PR_gray_highway1.png')
plt.show()
print ('ok 1')
# MSEN curves for the same parameter grid.
#task1.PlotOpticalFlow1(OFtest,OFgt)
plt.plot(area_size,matrix_m[0],label='Block size=10')
plt.plot(area_size,matrix_m[1],label='Block size=20')
plt.plot(area_size,matrix_m[2],label='Block size=30')
plt.plot(area_size,matrix_m[3],label='Block size=40')
plt.plot(area_size,matrix_m[4],label='Block size=50')
#plt.plot(area_size,matrix_m[5],label='Block size=50')
#plt.plot(area_size,matrix_m[6],label='Block size=60')
#plt.plot(area_size,matrix_m[7],label='Block size=70')
#plt.plot(area_size,matrix_m[10],label='Block size=50')
#plt.plot(area_size,matrix_m[11],label='Block size=55')
#plt.plot(area_size,matrix_p[12],label='Block size=25')
#plt.plot(area_size,matrix_m[6],label='Block size=30')
#plt.plot(recall_mop,precision_mop,label='With Moprhological filters | AUC=0,62')
plt.xlabel('Area of search')
plt.ylabel('MSEN')
plt.title('Frame 45')
plt.legend()
#plt.savefig('PR_gray_highway1.png')
plt.show()
print ('ok 2')
# ---- Experiment: frame 000157 -- same grid search on a second sequence ----
curr_dir = '/home/ferran/Desktop/M6_project/week4/training/image_0/000157_10.png'
future_dir = '/home/ferran/Desktop/M6_project/week4/training/image_0/000157_11.png'
gt_dir= '/home/ferran/Desktop/M6_project/week4/training/flow_noc/000157_10.png'
future_img = cv2.imread(future_dir,0)  # flag 0: read as grayscale
curr_img = cv2.imread(curr_dir,0)
block_size=np.arange(10,51,10)
area_size=np.arange(1,26,5)
OFgt = cv2.imread(gt_dir,-1)  # flag -1: load unchanged (ground-truth flow encoding)
x=block_size.shape[0]
y=area_size.shape[0]
matrix_m2=np.zeros([x,y])  # MSEN per (block size, area size) for frame 157
matrix_p2=np.zeros([x,y])  # PEPN per (block size, area size) for frame 157
c=0
for b in block_size:
    v=0
    for a in area_size:
        #matrix=np.array[]
#        motion_matrix=task1.compute_block_matching(future_img, curr_img,b,a)
##        motion_matrix1=motion_matrix
#
#        x_blocks=motion_matrix.shape[0]
#        y_blocks=motion_matrix.shape[1]
#
#        OFtest=task1.create_OFtest(future_img, motion_matrix, b, x_blocks, y_blocks)
        # NOTE(review): calcOpticalFlowBM is neither defined nor imported in
        # this file -- presumably a legacy OpenCV block-matching routine or a
        # local helper; as written this line raises NameError. Confirm the
        # intended callable before running.
        OFtest=calcOpticalFlowBM (curr_img, future_img, b, a)
        msen,pepn=task1.MSEN_PEPN(OFtest,OFgt)
        matrix_m2[c,v]=msen
        matrix_p2[c,v]=pepn
        v=v+1
    c=c+1
# PEPN curves for frame 157: one line per block size.
#plt.savefig('PR_gray_highway1.png') #40 30 50 60
plt.plot(area_size,matrix_p2[0],label='Block size=10')
plt.plot(area_size,matrix_p2[1],label='Block size=20')
plt.plot(area_size,matrix_p2[2],label='Block size=30')
plt.plot(area_size,matrix_p2[3],label='Block size=40')
plt.plot(area_size,matrix_p2[4],label='Block size=50')
#plt.plot(area_size,matrix_p2[5],label='Block size=50')
#plt.plot(area_size,matrix_p2[6],label='Block size=60')
#plt.plot(area_size,matrix_p2[7],label='Block size=70')
#plt.plot(area_size,matrix_p[8],label='Block size=40')
#plt.plot(area_size,matrix_p[9],label='Block size=45')
#plt.plot(area_size,matrix_p[10],label='Block size=50')
#plt.plot(area_size,matrix_p[11],label='Block size=55')
#plt.plot(area_size,matrix_p[12],label='Block size=25')
#plt.plot(area_size,matrix_m[6],label='Block size=30')
#plt.plot(recall_mop,precision_mop,label='With Moprhological filters | AUC=0,62')
plt.xlabel('Area of search')
plt.ylabel('PEPN')
plt.title('Frame 157')
plt.legend()
#plt.savefig('PR_gray_highway1.png')
plt.show()
print ('ok 3')
# MSEN curves for frame 157.
#task1.PlotOpticalFlow1(OFtest,OFgt)
plt.plot(area_size,matrix_m2[0],label='Block size=10')
plt.plot(area_size,matrix_m2[1],label='Block size=20')
plt.plot(area_size,matrix_m2[2],label='Block size=30')
plt.plot(area_size,matrix_m2[3],label='Block size=40')
plt.plot(area_size,matrix_m2[4],label='Block size=50')
#plt.plot(area_size,matrix_m2[4],label='Block size=40')
#plt.plot(area_size,matrix_m2[5],label='Block size=50')
#plt.plot(area_size,matrix_m2[6],label='Block size=60')
#plt.plot(area_size,matrix_m2[7],label='Block size=70')
#plt.plot(area_size,matrix_m[10],label='Block size=50')
#plt.plot(area_size,matrix_m[11],label='Block size=55')
#plt.plot(area_size,matrix_p[12],label='Block size=25')
#plt.plot(area_size,matrix_m[6],label='Block size=30')
#plt.plot(recall_mop,precision_mop,label='With Moprhological filters | AUC=0,62')
plt.xlabel('Area of search')
plt.ylabel('MSEN')
plt.title('Frame 157')
plt.legend()
#plt.savefig('PR_gray_highway1.png')
plt.show()
print ('ok 4')
#########################
| 35.152466
| 154
| 0.732364
| 1,416
| 7,839
| 3.870763
| 0.127825
| 0.103448
| 0.140485
| 0.145047
| 0.825579
| 0.815545
| 0.787265
| 0.78617
| 0.78617
| 0.767196
| 0
| 0.064151
| 0.097206
| 7,839
| 222
| 155
| 35.310811
| 0.710329
| 0.4447
| 0
| 0.512
| 0
| 0
| 0.189988
| 0.097735
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.192
| null | null | 0.056
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
771e47f16c487cf8dad83475a04b023e2630d6e1
| 84,751
|
py
|
Python
|
pybmrb/Spectra.py
|
uwbmrb/PyBMRB
|
0b1a94ca0cd54d8800f6a41ad0492005b49b1cbc
|
[
"MIT"
] | 6
|
2020-02-14T13:47:36.000Z
|
2021-08-01T09:42:28.000Z
|
pybmrb/Spectra.py
|
uwbmrb/PyBMRB
|
0b1a94ca0cd54d8800f6a41ad0492005b49b1cbc
|
[
"MIT"
] | 2
|
2019-01-15T16:31:31.000Z
|
2019-01-16T16:02:59.000Z
|
pybmrb/Spectra.py
|
uwbmrb/PyBMRB
|
0b1a94ca0cd54d8800f6a41ad0492005b49b1cbc
|
[
"MIT"
] | 4
|
2016-01-19T16:52:15.000Z
|
2021-02-24T04:08:52.000Z
|
#!/usr/bin/env python3
"""
This module is used to visualize the one dimensional chemical shift list from BMRB entry or NMR-STAR file as a
two dimensional NMR spectrum. It simulates peak positions of \u00b9\u2075N-HSQC, \u00b9\u00b3C-HSQC and
\u00b9H-\u00b9H-TOCSY. It can also simulate a generic 2D spectrum between any two given pair of atoms. This module is
useful to compare user data with any BMRB entry/list of entries as overlaid NMR spectra.
"""
import logging
import csv
import pynmrstar
from pybmrb import ChemicalShift, ChemicalShiftStatistics
import plotly.express as px
from typing import Union, List, Optional
def create_c13hsqc_peaklist(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
                            entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
                            input_file_names: Optional[Union[str, List[str]]] = None,
                            auth_tag: Optional[bool] = False,
                            draw_trace: Optional[bool] = False) -> tuple:
    """
    Converts one dimensional chemical shifts from BMRB entries/NMR-STAR files/PyNMRSTAR entry objects into
    \u00b9\u00b3C-HSQC peak list

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    # (1H, 13C) atom-name pairs that produce a 13C-HSQC peak for each residue
    # type; a trailing '*' is a wildcard over pseudo-atom suffixes
    # (e.g. 'HB*' matches HB2 and HB3).
    ch_atoms = {'ALA': [('HA', 'CA'), ('HB*', 'CB')],
                'ARG': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG'), ('HD*', 'CD')],
                'ASN': [('HA', 'CA'), ('HB*', 'CB')],
                'ASP': [('HA', 'CA'), ('HB*', 'CB')],
                'CYS': [('HA', 'CA'), ('HB*', 'CB')],
                'GLN': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG')],
                'GLU': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG')],
                'GLY': [('HA*', 'CA')],
                'HIS': [('HA', 'CA'), ('HB*', 'CB'), ('HD2', 'CD2'), ('HE1', 'CE1')],
                'ILE': [('HA', 'CA'), ('HB', 'CB'), ('HG2*', 'CG2'), ('HG1*', 'CG1'), ('HD1*', 'CD1')],
                'LEU': [('HA', 'CA'), ('HB*', 'CB'), ('HG', 'CG'), ('HD1*', 'CD1'), ('HD2*', 'CD2')],
                'LYS': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG'), ('HD*', 'CD'), ('HE*', 'CE')],
                'MET': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG'), ('HE*', 'CE')],
                'PHE': [('HA', 'CA'), ('HB*', 'CB'), ('HD1', 'CD1'), ('HE1', 'CE1'), ('HD2', 'CD2'), ('HE2', 'CE2'),
                        ('HZ', 'CZ')],
                'PRO': [('HA', 'CA'), ('HB*', 'CB'), ('HG*', 'CG'), ('HD*', 'CD')],
                'SER': [('HA', 'CA'), ('HB*', 'CB')],
                'TRP': [('HA', 'CA'), ('HB*', 'CB'), ('HD1', 'CD1'), ('HE3', 'CE3'), ('HZ2', 'CZ2'),
                        ('HH2', 'CH2')],
                'TYR': [('HA', 'CA'), ('HB*', 'CB'), ('HD1', 'CD1'), ('HE1', 'CE1'), ('HD2', 'CD2'),
                        ('HE2', 'CE2')],
                'THR': [('HA', 'CA'), ('HB', 'CB'), ('HG2*', 'CG2')],
                'VAL': [('HA', 'CA'), ('HB', 'CB'), ('HG1*', 'CG1'), ('HG2*', 'CG2')],
                }
    cs_data = {}
    # Merge chemical-shift data from every source that was actually given.
    # Fetching from the BMRB is now guarded like the other two sources (the
    # original called from_bmrb() even when bmrb_ids was None).
    if bmrb_ids is not None:
        cs_data_bmrb = ChemicalShift.from_bmrb(bmrb_ids=bmrb_ids, auth_tag=auth_tag)
        cs_data.update(cs_data_bmrb)
    if input_file_names is not None:
        cs_data_file = ChemicalShift.from_file(input_file_names=input_file_names, auth_tag=auth_tag)
        cs_data.update(cs_data_file)
    if entry_objects is not None:
        cs_data_obj = ChemicalShift.from_entry_object(entry_objects=entry_objects, auth_tag=auth_tag)
        cs_data.update(cs_data_obj)
    data_set = []
    x = []
    y = []
    res = []
    info = []
    atom_ids = {}
    for data_id in cs_data.keys():
        for chain in cs_data[data_id].keys():
            for seq_no in cs_data[data_id][chain]['seq_ids']:
                # Residue type is stored at index 0 of every atom record;
                # read it from the first atom at this sequence position.
                residue = cs_data[data_id][chain][seq_no][list(cs_data[data_id][chain][seq_no].keys())[0]][0]
                ch_list = ch_atoms[residue]
                for ch_atom in ch_list:
                    for atm_x in cs_data[data_id][chain][seq_no].keys():
                        # Match either the exact atom name or, for '*'
                        # entries, any atom sharing the wildcard prefix.
                        if ('*' in ch_atom[0]
                            and ch_atom[0][:-1] == atm_x[:len(ch_atom[0][:-1])]) \
                                or (ch_atom[0] == atm_x):
                            cs_x = cs_data[data_id][chain][seq_no][atm_x][2]
                            for atm_y in cs_data[data_id][chain][seq_no].keys():
                                if ('*' in ch_atom[1]
                                    and ch_atom[1][:-1] == atm_y[:len(ch_atom[1][:-1])]) \
                                        or (ch_atom[1] == atm_y):
                                    cs_y = cs_data[data_id][chain][seq_no][atm_y][2]
                                    data_set.append(data_id)
                                    x.append(cs_x)
                                    y.append(cs_y)
                                    res.append(residue)
                                    tag = '{}-{}-{}-{}-{}-{}'.format(data_id, chain, seq_no, residue, atm_x, atm_y)
                                    info.append(tag)
                                    # atom_id omits the data set so the same
                                    # atom can be tracked across data sets.
                                    atom_id = '{}-{}-{}-{}-{}'.format(chain, seq_no, residue, atm_x, atm_y)
                                    if draw_trace:
                                        if atom_id not in atom_ids.keys():
                                            atom_ids[atom_id] = [[], []]
                                        atom_ids[atom_id][0].append(cs_x)
                                        atom_ids[atom_id][1].append(cs_y)
    cs_track = {}
    if draw_trace:
        # Keep only atoms observed in more than one data set; a single
        # point cannot form a trace.
        for k in atom_ids.keys():
            if len(atom_ids[k][0]) > 1:
                cs_track[k] = atom_ids[k]
    return x, y, data_set, info, res, cs_track
def create_tocsy_peaklist(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
                          entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
                          input_file_names: Optional[Union[str, List[str]]] = None,
                          auth_tag: Optional[bool] = False,
                          draw_trace: Optional[bool] = False) -> tuple:
    """
    Converts one dimensional chemical shifts from BMRB entries/NMR-STAR files/PyNMRSTAR entry objects
    into \u00b9H-\u00b9H-TOCSY peak list

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    cs_data = {}
    # Merge chemical-shift data from every source that was actually given.
    # Fetching from the BMRB is now guarded like the other two sources (the
    # original called from_bmrb() even when bmrb_ids was None).
    if bmrb_ids is not None:
        cs_data_bmrb = ChemicalShift.from_bmrb(bmrb_ids=bmrb_ids, auth_tag=auth_tag)
        cs_data.update(cs_data_bmrb)
    if input_file_names is not None:
        cs_data_file = ChemicalShift.from_file(input_file_names=input_file_names, auth_tag=auth_tag)
        cs_data.update(cs_data_file)
    if entry_objects is not None:
        cs_data_obj = ChemicalShift.from_entry_object(entry_objects=entry_objects, auth_tag=auth_tag)
        cs_data.update(cs_data_obj)
    data_set = []
    x = []
    y = []
    res = []
    info = []
    atom_ids = {}
    for data_id in cs_data.keys():
        for chain in cs_data[data_id].keys():
            for seq_no in cs_data[data_id][chain]['seq_ids']:
                # Residue type is stored at index 0 of every atom record;
                # read it from the first atom at this sequence position.
                residue = cs_data[data_id][chain][seq_no][list(cs_data[data_id][chain][seq_no].keys())[0]][0]
                # TOCSY correlates every proton with every proton of the
                # same residue, so a single wildcard pair suffices.
                ch_list = [('H*', 'H*')]
                for ch_atom in ch_list:
                    for atm_x in cs_data[data_id][chain][seq_no].keys():
                        if ('*' in ch_atom[0]
                            and ch_atom[0][:-1] == atm_x[:len(ch_atom[0][:-1])]) \
                                or (ch_atom[0] == atm_x):
                            cs_x = cs_data[data_id][chain][seq_no][atm_x][2]
                            for atm_y in cs_data[data_id][chain][seq_no].keys():
                                if ('*' in ch_atom[1]
                                    and ch_atom[1][:-1] == atm_y[:len(ch_atom[1][:-1])]) \
                                        or (ch_atom[1] == atm_y):
                                    cs_y = cs_data[data_id][chain][seq_no][atm_y][2]
                                    data_set.append(data_id)
                                    x.append(cs_x)
                                    y.append(cs_y)
                                    res.append(residue)
                                    tag = '{}-{}-{}-{}-{}-{}'.format(data_id, chain, seq_no, residue, atm_x, atm_y)
                                    info.append(tag)
                                    # atom_id omits the data set so the same
                                    # atom can be tracked across data sets.
                                    atom_id = '{}-{}-{}-{}-{}'.format(chain, seq_no, residue, atm_x, atm_y)
                                    if draw_trace:
                                        if atom_id not in atom_ids.keys():
                                            atom_ids[atom_id] = [[], []]
                                        atom_ids[atom_id][0].append(cs_x)
                                        atom_ids[atom_id][1].append(cs_y)
    cs_track = {}
    if draw_trace:
        # Keep only atoms observed in more than one data set; a single
        # point cannot form a trace.
        for k in atom_ids.keys():
            if len(atom_ids[k][0]) > 1:
                cs_track[k] = atom_ids[k]
    return x, y, data_set, info, res, cs_track
def create_2d_peaklist(atom_x: str,
                       atom_y: str,
                       bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
                       input_file_names: Optional[Union[str, List[str]]] = None,
                       entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
                       auth_tag: Optional[bool] = False,
                       draw_trace: Optional[bool] = False,
                       include_preceding: Optional[bool] = False,
                       include_next: Optional[bool] = False,
                       legend: Optional[str] = None) -> tuple:
    """
    Converts one dimensional chemical shifts from BMRB entries/NMR-STAR files/PyNMRSTAR entry objects
    into a generic 2D peak list

    :param atom_x: atom name for X coordinate in IUPAC format; a trailing '*' acts as a
        prefix wildcard (e.g. 'H*' matches HA, HB2, ...)
    :type atom_x: str
    :param atom_y: atom name for Y coordinate in IUPAC format; a trailing '*' acts as a
        prefix wildcard
    :type atom_y: str
    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool, optional
    :param include_preceding: include peaks from i-1 residue on the Y axis, defaults to False
    :type include_preceding: bool, optional
    :param include_next: include peaks from i+1 residue on the Y axis, defaults to False
    :type include_next: bool, optional
    :param legend: when set to 'psn', the returned psn list carries the string labels
        'Preceding'/'Same'/'Next' for each peak; for any other value it carries the numeric
        markers 0.5 (preceding/next residue) and 1 (same residue), defaults to None
    :type legend: str, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track,psn,seq_trace)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    cs_data = {}
    # Only query the sources that were actually supplied. Previously bmrb_ids was
    # forwarded to from_bmrb unconditionally, which broke calls that provided only
    # input_file_names or entry_objects (the precondition above explicitly allows
    # bmrb_ids to be None in that case; create_n15hsqc_peaklist already guards this way).
    if bmrb_ids is not None:
        cs_data_bmrb = ChemicalShift.from_bmrb(bmrb_ids=bmrb_ids, auth_tag=auth_tag)
        cs_data.update(cs_data_bmrb)
    if input_file_names is not None:
        cs_data_file = ChemicalShift.from_file(input_file_names=input_file_names, auth_tag=auth_tag)
        cs_data.update(cs_data_file)
    if entry_objects is not None:
        cs_data_obj = ChemicalShift.from_entry_object(entry_objects=entry_objects, auth_tag=auth_tag)
        cs_data.update(cs_data_obj)
    data_set = []
    x = []
    y = []
    res = []
    info = []
    psn = []
    atom_ids = {}
    # seq_trace collects [x, y, seq_no] for every emitted peak so callers can draw
    # sequential walks; it is returned as-is (no filtering).
    seq_trace = [[], [], []]
    for data_id in cs_data.keys():
        for chain in cs_data[data_id].keys():
            for seq_no in cs_data[data_id][chain]['seq_ids']:
                # Residue type is read from the first atom record at this position.
                residue = cs_data[data_id][chain][seq_no][list(cs_data[data_id][chain][seq_no].keys())[0]][0]
                ch_list = [(atom_x, atom_y)]
                for ch_atom in ch_list:
                    for atm_x in cs_data[data_id][chain][seq_no].keys():
                        # Trailing '*' in the requested name matches any atom sharing the prefix.
                        if ('*' in ch_atom[0]
                            and ch_atom[0][:-1] == atm_x[:len(ch_atom[0][:-1])]) \
                                or (ch_atom[0] == atm_x):
                            cs_x = cs_data[data_id][chain][seq_no][atm_x][2]
                            for atm_y in cs_data[data_id][chain][seq_no].keys():
                                if ('*' in ch_atom[1]
                                    and ch_atom[1][:-1] == atm_y[:len(ch_atom[1][:-1])]) \
                                        or (ch_atom[1] == atm_y):
                                    if include_preceding:
                                        # Pair the X shift of residue i with the Y shift of
                                        # residue i-1; skip silently if i-1 is missing.
                                        try:
                                            residue_p = cs_data[data_id][chain][seq_no - 1][
                                                list(cs_data[data_id][chain][seq_no - 1].keys())[0]][0]
                                            cs_y = cs_data[data_id][chain][seq_no - 1][atm_y][2]
                                            data_set.append(data_id)
                                            x.append(cs_x)
                                            y.append(cs_y)
                                            seq_trace[0].append(cs_x)
                                            seq_trace[1].append(cs_y)
                                            seq_trace[2].append(seq_no - 1)
                                            res.append(residue)
                                            if legend == 'psn':
                                                psn.append('Preceding')
                                            else:
                                                psn.append(0.5)
                                            tag = '{}-{}-{}-{}-{}-{}-{}-{}'.format(data_id, chain, seq_no, residue,
                                                                                   seq_no - 1, residue_p, atm_x,
                                                                                   atm_y)
                                            info.append(tag)
                                            atom_id = '{}-{}-{}-{}-{}-{}-{}'.format(chain, seq_no, residue, seq_no - 1,
                                                                                    residue_p, atm_x, atm_y)
                                            if draw_trace:
                                                if atom_id not in atom_ids.keys():
                                                    atom_ids[atom_id] = [[], []]
                                                atom_ids[atom_id][0].append(cs_x)
                                                atom_ids[atom_id][1].append(cs_y)
                                        except KeyError:
                                            pass
                                    # Intra-residue peak: both shifts from residue i.
                                    cs_y = cs_data[data_id][chain][seq_no][atm_y][2]
                                    data_set.append(data_id)
                                    x.append(cs_x)
                                    y.append(cs_y)
                                    seq_trace[0].append(cs_x)
                                    seq_trace[1].append(cs_y)
                                    seq_trace[2].append(seq_no)
                                    res.append(residue)
                                    if legend == 'psn':
                                        psn.append('Same')
                                    else:
                                        psn.append(1)
                                    tag = '{}-{}-{}-{}-{}-{}'.format(data_id, chain, seq_no, residue, atm_x, atm_y)
                                    info.append(tag)
                                    atom_id = '{}-{}-{}-{}-{}'.format(chain, seq_no, residue, atm_x, atm_y)
                                    if draw_trace:
                                        if atom_id not in atom_ids.keys():
                                            atom_ids[atom_id] = [[], []]
                                        atom_ids[atom_id][0].append(cs_x)
                                        atom_ids[atom_id][1].append(cs_y)
                                    if include_next:
                                        # Pair the X shift of residue i with the Y shift of
                                        # residue i+1; skip silently if i+1 is missing.
                                        try:
                                            residue_p = cs_data[data_id][chain][seq_no + 1][
                                                list(cs_data[data_id][chain][seq_no + 1].keys())[0]][0]
                                            cs_y = cs_data[data_id][chain][seq_no + 1][atm_y][2]
                                            data_set.append(data_id)
                                            x.append(cs_x)
                                            y.append(cs_y)
                                            seq_trace[0].append(cs_x)
                                            seq_trace[1].append(cs_y)
                                            seq_trace[2].append(seq_no + 1)
                                            res.append(residue)
                                            if legend == 'psn':
                                                psn.append('Next')
                                            else:
                                                psn.append(0.5)
                                            tag = '{}-{}-{}-{}-{}-{}-{}-{}'.format(data_id, chain, seq_no, residue,
                                                                                   seq_no + 1, residue_p, atm_x,
                                                                                   atm_y)
                                            info.append(tag)
                                            atom_id = '{}-{}-{}-{}-{}-{}-{}'.format(chain, seq_no, residue, seq_no + 1,
                                                                                    residue_p, atm_x, atm_y)
                                            if draw_trace:
                                                if atom_id not in atom_ids.keys():
                                                    atom_ids[atom_id] = [[], []]
                                                atom_ids[atom_id][0].append(cs_x)
                                                atom_ids[atom_id][1].append(cs_y)
                                        except KeyError:
                                            pass
    cs_track = {}
    if draw_trace:
        # Only atoms seen in more than one data set are worth connecting with a trace.
        for k in atom_ids.keys():
            if len(atom_ids[k][0]) > 1:
                cs_track[k] = atom_ids[k]
    return x, y, data_set, info, res, cs_track, psn, seq_trace
def create_n15hsqc_peaklist(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
                            input_file_names: Optional[Union[str, List[str]]] = None,
                            entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
                            auth_tag: Optional[bool] = False,
                            draw_trace: Optional[bool] = False,
                            include_sidechain: Optional[bool] = True) -> tuple:
    """
    Converts one dimensional chemical shifts from BMRB entries/NMR-STAR files/PyNMRSTAR
    entry objects into \u00b9\u2075N-HSQC peak list

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool, optional
    :param include_sidechain: include peaks from side chains, defaults to True
    :type include_sidechain: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    # Backbone amide atoms used for the main H-N correlation.
    atom_x = 'H'
    atom_y = 'N'
    # Side-chain N-H pairs per residue type; the key is a human-readable label
    # ("RES-H-N") that ends up in the peak tags, the value is the [H, N] atom pair.
    sidechain_nh_atoms = {
        'ARG': {
            'ARG-HH11-NH1': ['HH11', 'NH1'],
            'ARG-HH12-NH1': ['HH12', 'NH1'],
            'ARG-HH21-NH2': ['HH21', 'NH2'],
            'ARG-HH22-NH2': ['HH22', 'NH2'],
            'ARG-HE-NE': ['HE', 'NE']},
        'GLN': {
            'GLN-HE21-NE2': ['HE21', 'NE2'],
            'GLN-HE22-NE2': ['HE22', 'NE2']},
        'ASN': {
            'ASN-HD21-ND2': ['HD21', 'ND2'],
            'ASN-HD22-ND2': ['HD22', 'ND2']},
        'HIS': {
            'HIS-HD1-ND1': ['HD1', 'ND1'],
            # Fixed label: this key previously read 'HIS-HE2-ND1', mislabelling the
            # nitrogen in the generated peak tags; the atom pair is HE2/NE2.
            'HIS-HE2-NE2': ['HE2', 'NE2']},
        'TRP': {
            'TRP-HE1-NE1': ['HE1', 'NE1']},
        'LYS': {
            'LYS-HZ-NZ': ['HZ', 'NZ'],
            'LYS-HZ1-NZ': ['HZ1', 'NZ'],
            'LYS-HZ2-NZ': ['HZ2', 'NZ'],
            'LYS-HZ3-NZ': ['HZ3', 'NZ']}
    }
    cs_data = {}
    # Merge chemical shifts from every supplied source.
    if bmrb_ids is not None:
        cs_data_bmrb = ChemicalShift.from_bmrb(bmrb_ids=bmrb_ids, auth_tag=auth_tag)
        cs_data.update(cs_data_bmrb)
    if input_file_names is not None:
        cs_data_file = ChemicalShift.from_file(input_file_names=input_file_names, auth_tag=auth_tag)
        cs_data.update(cs_data_file)
    if entry_objects is not None:
        cs_data_obj = ChemicalShift.from_entry_object(entry_objects=entry_objects, auth_tag=auth_tag)
        cs_data.update(cs_data_obj)
    data_set = []
    x = []
    y = []
    res = []
    info = []
    atom_ids = {}
    for data_id in cs_data.keys():
        for chain in cs_data[data_id].keys():
            for seq_no in cs_data[data_id][chain]['seq_ids']:
                try:
                    # Backbone amide peak; residues missing either shift are skipped.
                    x_cs = cs_data[data_id][chain][seq_no][atom_x][2]
                    y_cs = cs_data[data_id][chain][seq_no][atom_y][2]
                    residue = cs_data[data_id][chain][seq_no][atom_y][0]
                    res.append(residue)
                    tag = '{}-{}-{}-{}-H-N'.format(data_id, chain, seq_no, residue)
                    data_set.append(data_id)
                    atom_id = '{}-{}-{}'.format(chain, seq_no, residue)
                    if draw_trace:
                        if atom_id not in atom_ids.keys():
                            atom_ids[atom_id] = [[], []]
                        atom_ids[atom_id][0].append(x_cs)
                        atom_ids[atom_id][1].append(y_cs)
                    x.append(x_cs)
                    y.append(y_cs)
                    info.append(tag)
                    if include_sidechain:
                        if residue in sidechain_nh_atoms.keys():
                            for atom_list in sidechain_nh_atoms[residue]:
                                ax = sidechain_nh_atoms[residue][atom_list][0]
                                ay = sidechain_nh_atoms[residue][atom_list][1]
                                try:
                                    x_cs = cs_data[data_id][chain][seq_no][ax][2]
                                    y_cs = cs_data[data_id][chain][seq_no][ay][2]
                                    res.append(residue)
                                    tag = '{}-{}-{}-{}'.format(data_id, chain, seq_no,
                                                               atom_list)
                                    data_set.append(data_id)
                                    atom_id = '{}-{}-{}'.format(chain, seq_no,
                                                                atom_list)
                                    if draw_trace:
                                        if atom_id not in atom_ids.keys():
                                            atom_ids[atom_id] = [[], []]
                                        atom_ids[atom_id][0].append(x_cs)
                                        atom_ids[atom_id][1].append(y_cs)
                                    x.append(x_cs)
                                    y.append(y_cs)
                                    info.append(tag)
                                except KeyError:
                                    logging.debug('Data not found:{},{},{}'.format(data_id, chain, seq_no))
                except KeyError:
                    logging.debug('Data not found:{},{},{}'.format(data_id, chain, seq_no))
    cs_track = {}
    if draw_trace:
        # Keep only atoms observed in more than one data set for trace drawing.
        for k in atom_ids.keys():
            if len(atom_ids[k][0]) > 1:
                cs_track[k] = atom_ids[k]
    return x, y, data_set, info, res, cs_track
def n15hsqc(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
            input_file_names: Optional[Union[str, List[str]]] = None,
            entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
            auth_tag: Optional[bool] = False,
            legend: Optional[str] = None,
            draw_trace: Optional[bool] = False,
            include_sidechain: Optional[bool] = True,
            peak_list: Optional[str] = None,
            output_format: Optional[str] = 'html',
            output_file: Optional[str] = None,
            output_image_width: Optional[int] = 800,
            output_image_height: Optional[int] = 600,
            show_visualization: Optional[bool] = True) -> tuple:
    """
    Plots \u00b9\u2075N-HSQC spectrum for a given BMRB entry/NMR-STAR file/PyNMRSTAR entry object;
    This function can be used to compare different data sets as overlaid NMR Spectra. It overlays \u00b9\u2075N-HSQC
    for a given list of BMRB entries/NMR-STAR files/PyNMRSTAR entry objects and draw lines connecting peaks
    from residues at the same sequence location in different data sets

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param legend: legends are disabled by default. Residue types are indicated by color and data sets are
        indicated by symbol, displaying the combination of both will create a very long list of legend. Optionally
        either 'residue' or 'dataset' can be used to color code the scatter plot by residue type
        or data set and display the legend, defaults to None
    :type legend: str, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool, optional
    :param include_sidechain: include peaks from side chains, defaults to True
    :type include_sidechain: bool, optional
    :param peak_list: comma-separated two column file can be given as optional unassigned peak list,
        which can be overlaid on the spectrum as another data set, defaults to None
    :type peak_list: str, optional
    :param output_format: visualizations can be exported as interactive 'html' file
        or as static images in 'jpg','jpeg','png','pdf','webp','svg', defaults to 'html'
    :type output_format: str, optional
    :param output_file: file name to export visualization
    :type output_file: str, optional
    :param output_image_width: The width of the exported image in layout pixels, defaults to 800
    :type output_image_width: int, optional
    :param output_image_height: The height of the exported image in layout pixels, defaults to 600
    :type output_image_height: int, optional
    :param show_visualization: Visualization automatically opens in a web browser or as
        embedded visualization in Jupyter Notebook. This feature can be disabled
        by setting this flag as False, defaults to True
    :type show_visualization: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    x1 = []
    y1 = []
    if peak_list is not None:
        # Optional unassigned peak list: two-column CSV of (1H, 15N) positions.
        with open(peak_list) as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',')
            for row in spamreader:
                x1.append(float(row[0]))
                y1.append(float(row[1]))
    peak_list_2d = create_n15hsqc_peaklist(bmrb_ids,
                                           input_file_names=input_file_names,
                                           entry_objects=entry_objects,
                                           auth_tag=auth_tag,
                                           draw_trace=draw_trace,
                                           include_sidechain=include_sidechain)
    # Unpack coordinates, per-peak hover metadata and trace bookkeeping.
    x = peak_list_2d[0]
    y = peak_list_2d[1]
    data_set = peak_list_2d[2]
    info = peak_list_2d[3]
    res = peak_list_2d[4]
    cs_track = peak_list_2d[5]
    if (len(x) == 0 or len(y) == 0) and (len(x1) == 0 or len(y1) == 0):
        logging.error('Required chemical shifts not found')
        raise ValueError('Required chemical shifts not found')
    if legend is None:
        if len(x1) and len(y1):
            # With an external peak list overlaid, color by data set so the overlay
            # is distinguishable, and keep the legend visible (see below).
            fig = px.scatter(x=x, y=y,
                             title='Simulated <sup>1</sup>H-<sup>15</sup>N HSQC peak positions',
                             hover_name=info,
                             color=data_set,
                             labels={"color": "Data set",
                                     # "symbol": "Data set",
                                     "x": '<sup>1</sup>H (ppm)',
                                     "y": '<sup>15</sup>N (ppm)',
                                     }, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        else:
            fig = px.scatter(x=x, y=y,
                             title='Simulated <sup>1</sup>H-<sup>15</sup>N HSQC peak positions',
                             symbol=data_set,
                             hover_name=info,
                             color=res,
                             labels={"color": "Residue",
                                     "symbol": "Data set",
                                     "x": '<sup>1</sup>H (ppm)',
                                     "y": '<sup>15</sup>N (ppm)',
                                     }, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, opacity=0.7, mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        if len(x1) and len(y1):
            fig.update_layout(showlegend=True)
        else:
            fig.update_layout(showlegend=False)
        # ppm axes conventionally run from high to low values.
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'residue':
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>15</sup>N HSQC peak positions',
                         hover_name=info,
                         color=res,
                         labels={"color": "Residue",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>15</sup>N (ppm)',
                                 }, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                # NOTE: 'hover_name' is a plotly.express argument only; go.Scatter
                # rejects it with a ValueError, so the trace uses 'name' alone here
                # (consistent with the other legend branches).
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, mode='lines',
                                opacity=0.7)
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'dataset':
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>15</sup>N HSQC peak positions',
                         hover_name=info,
                         color=data_set,
                         labels={"color": "Data set",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>15</sup>N (ppm)',
                                 }, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0],
                                y=cs_track[k][1],
                                name=k,
                                opacity=0.7,
                                mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    else:
        raise ValueError('legend type not supported')
    if show_visualization:
        fig.show()
    if output_file is not None:
        if output_format == 'html':
            fig.write_html(output_file)
        elif output_format in ['png', 'jpg', 'jpeg', 'webp', 'svg', 'pdf', 'json']:
            fig.write_image(file=output_file, format=output_format, width=output_image_width,
                            height=output_image_height)
        else:
            logging.error('Output file format not supported:{}'.format(output_format))
    return x, y, data_set, info, res, cs_track
def c13hsqc(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
            input_file_names: Optional[Union[str, List[str]]] = None,
            entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
            auth_tag: Optional[bool] = False,
            legend: Optional[str] = None,
            draw_trace: Optional[bool] = False,
            peak_list: Optional[str] = None,
            output_format: Optional[str] = 'html',
            output_file: Optional[str] = None,
            output_image_width: Optional[int] = 800,
            output_image_height: Optional[int] = 600,
            show_visualization: Optional[bool] = True) -> tuple:
    """
    Plots \u00b9\u00b3C-HSQC spectrum for a given BMRB entry/NMR-STAR file/PyNMRSTAR entry object;
    This function can be used to compare different data sets as overlaid NMR Spectra. It overlays \u00b9\u00b3C-HSQC
    for a given list of BMRB entries/NMR-STAR files/PyNMRSTAR entry objects and draw lines connecting peaks
    from residues at the same sequence location in different data sets

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param legend: legends are disabled by default. Residue types are indicated by color and data sets are
        indicated by symbol, displaying the combination of both will create a very long list of legend. Optionally
        either 'residue' or 'dataset' can be used to color code the scatter plot by residue type
        or data set and display the legend, defaults to None
    :type legend: str, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool optional
    :param peak_list: comma-separated two column file can be given as optional unassigned peak list,
        which can be overlaid on the spectrum as another data set, defaults to None
    :type peak_list: str, optional
    :param output_format: visualizations can be exported as interactive 'html' file
        or as static images in 'jpg','jpeg','png','pdf','webp','svg', defaults to 'html'
    :type output_format: str, optional
    :param output_file: file name to export visualization
    :type output_file: str, optional
    :param output_image_width: The width of the exported image in layout pixels, defaults to 800
    :type output_image_width: int, optional
    :param output_image_height: The height of the exported image in layout pixels, defaults to 600
    :type output_image_height: int, optional
    :param show_visualization: Visualization automatically opens in a web browser or as
        embedded visualization in Jupyter Notebook. This feature can be disabled
        by setting this flag as False, defaults to True
    :type show_visualization: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    # At least one chemical-shift source is mandatory.
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    # Simulate the 13C-HSQC peak positions from the assigned chemical shifts.
    peak_list_2d = create_c13hsqc_peaklist(bmrb_ids,
                                           input_file_names=input_file_names,
                                           entry_objects=entry_objects,
                                           auth_tag=auth_tag,
                                           draw_trace=draw_trace)
    x1 = []
    y1 = []
    if peak_list is not None:
        # Optional unassigned peak list: two-column CSV of (x, y) positions.
        with open(peak_list) as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',')
            for row in spamreader:
                x1.append(float(row[0]))
                y1.append(float(row[1]))
    # Unpack coordinates, per-peak hover metadata and trace bookkeeping.
    x = peak_list_2d[0]
    y = peak_list_2d[1]
    data_set = peak_list_2d[2]
    info = peak_list_2d[3]
    res = peak_list_2d[4]
    cs_track = peak_list_2d[5]
    if len(x) == 0 or len(y) == 0:
        logging.error('Required chemical shifts not found')
        raise ValueError('Required chemical shifts not found')
    if legend is None:
        # No legend: color by residue, symbol by data set, legend hidden below.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>13</sup>C HSQC peak positions',
                         symbol=data_set,
                         hover_name=info,
                         color=res,
                         labels={"color": "Residue",
                                 "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>13</sup>C (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            # Connect matching atoms across data sets with line traces.
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1],
                                name=k, opacity=0.7, mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_layout(showlegend=False)
        # ppm axes conventionally run from high to low values.
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'residue':
        # Legend by residue type only.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>13</sup>C HSQC peak positions',
                         hover_name=info,
                         color=res,
                         labels={"color": "Residue",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>13</sup>C (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, mode='lines', opacity=0.7)
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'dataset':
        # Legend by data set only.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>13</sup>C HSQC peak positions',
                         hover_name=info,
                         color=data_set,
                         labels={"color": "Data set",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>13</sup>C (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, opacity=0.7, mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    else:
        raise ValueError('legend type not supported')
    if show_visualization:
        fig.show()
    # Optional export: interactive HTML or a static image via kaleido/orca.
    if output_file is not None:
        if output_format == 'html':
            fig.write_html(output_file)
        elif output_format in ['png', 'jpg', 'jpeg', 'webp', 'svg', 'pdf', 'json']:
            fig.write_image(file=output_file, format=output_format, width=output_image_width,
                            height=output_image_height)
        else:
            logging.error('Output file format not supported:{}'.format(output_format))
    return x, y, data_set, info, res, cs_track
def tocsy(bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
          input_file_names: Optional[Union[str, List[str]]] = None,
          entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
          auth_tag: Optional[bool] = False,
          legend: Optional[str] = None,
          draw_trace: Optional[bool] = False,
          peak_list: Optional[str] = None,
          output_format: Optional[str] = 'html',
          output_file: Optional[str] = None,
          output_image_width: Optional[int] = 800,
          output_image_height: Optional[int] = 600,
          show_visualization: Optional[bool] = True) -> tuple:
    """
    Plots \u00b9H-\u00b9H-TOCSY spectrum for a given BMRB entry/NMR-STAR file/PyNMRSTAR entry object;
    This function can be used to compare different data sets as overlaid NMR Spectra. It overlays \u00b9H-\u00b9H-TOCSY
    for a given list of BMRB entries/NMR-STAR files/PyNMRSTAR entry objects and draw lines connecting peaks
    from residues at the same sequence location in different data sets

    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param legend: legends are disabled by default. Residue types are indicated by color and data sets are
        indicated by symbol, displaying the combination of both will create a very long list of legend. Optionally
        either 'residue' or 'dataset' can be used to color code the scatter plot by residue type
        or data set and display the legend, defaults to None
    :type legend: str, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool optional
    :param peak_list: comma-separated two column file can be given as optional unassigned peak list,
        which can be overlaid on the spectrum as another data set, defaults to None
    :type peak_list: str, optional
    :param output_format: visualizations can be exported as interactive 'html' file
        or as static images in 'jpg','jpeg','png','pdf','webp','svg', defaults to 'html'
    :type output_format: str, optional
    :param output_file: file name to export visualization
    :type output_file: str, optional
    :param output_image_width: The width of the exported image in layout pixels, defaults to 800
    :type output_image_width: int, optional
    :param output_image_height: The height of the exported image in layout pixels, defaults to 600
    :type output_image_height: int, optional
    :param show_visualization: Visualization automatically opens in a web browser or as
        embedded visualization in Jupyter Notebook. This feature can be disabled
        by setting this flag as False, defaults to True
    :type show_visualization: bool, optional
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    # At least one chemical-shift source is mandatory.
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    # Simulate the 1H-1H TOCSY peak positions from the assigned chemical shifts.
    peak_list_2d = create_tocsy_peaklist(bmrb_ids,
                                         input_file_names=input_file_names,
                                         entry_objects=entry_objects,
                                         auth_tag=auth_tag,
                                         draw_trace=draw_trace)
    x1 = []
    y1 = []
    if peak_list is not None:
        # Optional unassigned peak list: two-column CSV of (x, y) positions.
        with open(peak_list) as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',')
            for row in spamreader:
                x1.append(float(row[0]))
                y1.append(float(row[1]))
    # Unpack coordinates, per-peak hover metadata and trace bookkeeping.
    x = peak_list_2d[0]
    y = peak_list_2d[1]
    data_set = peak_list_2d[2]
    info = peak_list_2d[3]
    res = peak_list_2d[4]
    cs_track = peak_list_2d[5]
    if len(x) == 0 or len(y) == 0:
        logging.error('Required chemical shifts not found')
        raise ValueError('Required chemical shifts not found')
    if legend is None:
        # No legend: color by residue, symbol by data set, legend hidden below.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>1</sup>H TOCSY peak positions',
                         symbol=data_set,
                         hover_name=info,
                         color=res,
                         labels={"color": "Residue",
                                 "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>1</sup>H (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            # Connect matching atoms across data sets with line traces.
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, opacity=0.7, mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_layout(showlegend=False)
        # ppm axes conventionally run from high to low values.
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'residue':
        # Legend by residue type only.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>1</sup>H TOCSY peak positions',
                         hover_name=info,
                         color=res,
                         labels={"color": "Residue",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>1</sup>H (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, mode='lines',
                                opacity=0.7)
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    elif legend == 'dataset':
        # Legend by data set only.
        fig = px.scatter(x=x, y=y,
                         title='Simulated <sup>1</sup>H-<sup>1</sup>H TOCSY peak positions',
                         hover_name=info,
                         color=data_set,
                         labels={"color": "Data set",
                                 # "symbol": "Data set",
                                 "x": '<sup>1</sup>H (ppm)',
                                 "y": '<sup>1</sup>H (ppm)'}, opacity=0.7).update(layout=dict(title=dict(x=0.5)))
        if draw_trace:
            for k in cs_track.keys():
                fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, opacity=0.7, mode='lines')
        if peak_list is not None:
            fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
        fig.update_xaxes(autorange="reversed")
        fig.update_yaxes(autorange="reversed")
    else:
        raise ValueError('legend type not supported')
    if show_visualization:
        fig.show()
    # Optional export: interactive HTML or a static image via kaleido/orca.
    if output_file is not None:
        if output_format == 'html':
            fig.write_html(output_file)
        elif output_format in ['png', 'jpg', 'jpeg', 'webp', 'svg', 'pdf', 'json']:
            fig.write_image(file=output_file, format=output_format, width=output_image_width,
                            height=output_image_height)
        else:
            logging.error('Output file format not supported:{}'.format(output_format))
    return x, y, data_set, info, res, cs_track
def generic_2d(atom_x: str,
               atom_y: str,
               bmrb_ids: Optional[Union[str, List[str], int, List[int]]] = None,
               input_file_names: Optional[Union[str, List[str]]] = None,
               entry_objects: Optional[Union[pynmrstar.Entry, List[pynmrstar.Entry]]] = None,
               auth_tag: Optional[bool] = False,
               legend: Optional[str] = None,
               draw_trace: Optional[bool] = False,
               peak_list: Optional[str] = None,
               include_preceding: Optional[bool] = False,
               include_next: Optional[bool] = False,
               full_walk: Optional[bool] = False,
               seq_walk: Optional[bool] = False,
               output_format: Optional[str] = 'html',
               output_file: Optional[str] = None,
               output_image_width: Optional[int] = 800,
               output_image_height: Optional[int] = 600,
               show_visualization: Optional[bool] = True) -> tuple:
    """
    Plots generic 2D spectrum for a given BMRB entry/NMR-STAR file/PyNMRSTAR entry object;
    This function can be used to compare different data sets as overlaid NMR Spectra. It overlays the 2D spectra
    for a given list of BMRB entries/NMR-STAR files/PyNMRSTAR entry objects and draw lines connecting peaks
    from residues at the same sequence location in different data sets

    :param atom_x: atom name for X coordinate in IUPAC format
    :type atom_x: str
    :param atom_y: atom name for Y coordinate in IUPAC format
    :type atom_y: str
    :param bmrb_ids: single BMRB entry ID or multiple BMRB entry IDs as list, defaults to None
    :type bmrb_ids: str/int/list, optional
    :param input_file_names: single input file name or multiple input file names as list, defaults to None
    :type input_file_names: str, optional
    :param entry_objects: single PyNMRSTAR entry object or multiple PyNMRSTAR entry objects as list, defaults to None
    :type entry_objects: PyNMRSTAR object/list, optional
    :param auth_tag: Use sequence numbers from _Atom_chem_shift.Auth_seq_ID instead of _Atom_chem_shift.Comp_index_ID
        in the NMR-STAR file/BMRB entry, defaults to False
    :type auth_tag: bool, optional
    :param legend: legends are disabled by default. Residue types are indicated by color and data sets are
        indicated by symbol, displaying the combination of both will create a very long list of legend. Optionally
        'residue', 'dataset' or 'psn' can be used to color code the scatter plot by residue type, data set,
        or the psn value returned by create_2d_peaklist, and display the legend, defaults to None
    :type legend: str, optional
    :param draw_trace: draw line connecting peaks from residues at the same sequence location in different
        data sets, defaults to False
    :type draw_trace: bool optional
    :param include_preceding: include peaks from i-1 residue on the Y axis, defaults to False
    :type include_preceding: bool, optional
    :param include_next: include peaks from i+1 residue on the Y axis, defaults to False
    :type include_next: bool, optional
    :param seq_walk: draw line connecting i->i-1/i+1 to next i->i-1/i+1 for only
        continuous sequence segments, defaults to False
    :type seq_walk: bool, optional
    :param full_walk: draw line connecting i->i-1/i+1 to next i->i-1/i+1 for the
        full sequence ignoring any missing residues, defaults to False
    :type full_walk: bool, optional
    :param peak_list: comma-separated two column file can be given as optional unassigned peak list,
        which can be overlaid on the spectrum as another data set, defaults to None
    :type peak_list: str, optional
    :param output_format: visualizations can be exported as interactive 'html' file
        or as static images in 'jpg','jpeg','png','pdf','webp','svg', defaults to 'html'
    :type output_format: str, optional
    :param output_file: file name to export visualization
    :type output_file: str, optional
    :param output_image_width: The width of the exported image in layout pixels, defaults to 800
    :type output_image_width: int, optional
    :param output_image_height: The height of the exported image in layout pixels, defaults to 600
    :type output_image_height: int, optional
    :param show_visualization: Visualization automatically opens in a web browser or as
        embedded visualization in Jupyter Notebook. This feature can be disabled
        by setting this flag as False, defaults to True
    :type show_visualization: bool, optional
    :raises TypeError: if none of bmrb_ids, input_file_names, entry_objects is given
    :raises ValueError: if the required chemical shifts are missing or the legend type is unknown
    :return: tuple of lists and dictionary (x,y,data_set,info,res,cs_track)
        if draw_trace is True cs_track={ matching atoms:[cs_values]} else cs_track={}
    :rtype: tuple
    """
    if bmrb_ids is None and input_file_names is None and entry_objects is None:
        logging.error('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
        raise TypeError('At least one of three parameters must be present; bmrb_ids, input_file_names, entry_objects')
    peak_list_2d = create_2d_peaklist(atom_x=atom_x,
                                      atom_y=atom_y,
                                      bmrb_ids=bmrb_ids,
                                      input_file_names=input_file_names,
                                      entry_objects=entry_objects,
                                      auth_tag=auth_tag,
                                      draw_trace=draw_trace,
                                      include_preceding=include_preceding,
                                      include_next=include_next,
                                      legend=legend)
    x, y, data_set, info, res, cs_track, psn, seq_trace = peak_list_2d[:8]
    # Optional unassigned peak list: two-column CSV of (x, y) chemical shifts
    # overlaid on the spectrum as an extra trace.
    x1 = []
    y1 = []
    if peak_list is not None:
        with open(peak_list) as csvfile:
            for row in csv.reader(csvfile, delimiter=','):
                x1.append(float(row[0]))
                y1.append(float(row[1]))

    def _sequence_walk(seq_t, filt=True):
        # Split the (x, y, seq_no) columns of seq_t into runs of consecutive
        # (or repeated) sequence numbers; each run becomes one 'walk_<n>'
        # line trace for the sequential-walk display.
        seq_traces = {}
        j = 0
        s = -999  # sentinel so the first peak always opens a new run
        for i in range(len(seq_t[0])):
            if seq_t[2][i] == s or seq_t[2][i] == s + 1:
                s = seq_t[2][i]
                seq_traces['walk_{}'.format(j)][0].append(seq_t[0][i])
                seq_traces['walk_{}'.format(j)][1].append(seq_t[1][i])
            else:
                s = seq_t[2][i]
                j += 1
                seq_traces['walk_{}'.format(j)] = [[seq_t[0][i]], [seq_t[1][i]]]
        if filt:
            # A walk needs at least two points to draw a line; drop singletons.
            return {k1: v1 for k1, v1 in seq_traces.items() if len(v1[0]) > 1}
        return seq_traces

    sq_walk = _sequence_walk(seq_trace)
    if len(x) == 0 or len(y) == 0:
        logging.error('Required chemical shifts not found')
        raise ValueError('Required chemical shifts not found')
    # Every legend mode shares the same scatter call; only the
    # color/symbol/size mapping and the legend labels differ.
    axis_labels = {"x": '{} (ppm)'.format(atom_x),
                   "y": '{} (ppm)'.format(atom_y)}
    if legend is None:
        scatter_kwargs = dict(symbol=data_set, size=psn, color=res,
                              labels=dict(axis_labels, color="Residue", symbol="Data set"))
    elif legend == 'residue':
        scatter_kwargs = dict(color=res, size=psn,
                              labels=dict(axis_labels, color="Residue"))
    elif legend == 'dataset':
        scatter_kwargs = dict(color=data_set, size=psn,
                              labels=dict(axis_labels, color="Data set"))
    elif legend == 'psn':
        scatter_kwargs = dict(color=psn,
                              labels=dict(axis_labels, color="Data set"))
    else:
        raise ValueError('legend type not supported')
    fig = px.scatter(x=x, y=y,
                     title='Simulated {}-{} COSY peak positions'.format(atom_x, atom_y),
                     hover_name=info,
                     opacity=0.7,
                     **scatter_kwargs)
    fig.update(layout=dict(title=dict(x=0.5)))  # center the plot title
    if full_walk:
        fig.add_scatter(x=seq_trace[0], y=seq_trace[1], name='Full walk', opacity=0.7, mode='lines')
    if seq_walk:
        for k in sq_walk.keys():
            fig.add_scatter(x=sq_walk[k][0], y=sq_walk[k][1], name=k, opacity=0.7, mode='lines')
    if draw_trace:
        for k in cs_track.keys():
            fig.add_scatter(x=cs_track[k][0], y=cs_track[k][1], name=k, opacity=0.7, mode='lines')
    if peak_list is not None:
        fig.add_scatter(x=x1, y=y1, mode='markers', name='Peak list', opacity=0.7)
    if legend is None:
        # residue-by-color plus dataset-by-symbol would generate an unusably
        # long legend, so the default mode hides it entirely.
        fig.update_layout(showlegend=False)
    # NMR convention: both ppm axes run from high to low.
    fig.update_xaxes(autorange="reversed")
    fig.update_yaxes(autorange="reversed")
    if show_visualization:
        fig.show()
    if output_file is not None:
        if output_format == 'html':
            fig.write_html(output_file)
        elif output_format in ['png', 'jpg', 'jpeg', 'webp', 'svg', 'pdf', 'json']:
            fig.write_image(file=output_file, format=output_format, width=output_image_width,
                            height=output_image_height)
        else:
            logging.error('Output file format not supported:{}'.format(output_format))
    return x, y, data_set, info, res, cs_track
def export_peak_list(peak_list: tuple,
                     output_file_name: Optional[str] = None,
                     output_format: str = 'csv',
                     include_side_chain: bool = True) -> dict:
    """
    Exports the peak list return from other functions in this module in csv or sparky format

    :param peak_list: Output tuple from any peak list/spectrum simulation function from the module Spectra;
        index 0 holds X shifts, index 1 Y shifts, and index 3 the '-'-separated info strings
    :type peak_list: tuple
    :param output_file_name: output file name
    :type output_file_name: str, optional
    :param output_format: output format 'csv' or 'sparky', defaults to 'csv'
    :type output_format: str, optional
    :param include_side_chain: whether or not include side chain resonances in the output, defaults to True
    :type include_side_chain: bool, optional
    :raises ValueError: if output_format is neither 'csv' nor 'sparky'
    :return: data dictionary {'column header1':[values],'column header1':[values]..}
    :rtype: dict
    """
    # Backbone atom names; used to filter out side-chain peaks on request.
    back_bone = ['H', 'N', 'C', 'CA']
    if output_format == 'csv':
        csv_dict = {'sequence': [],
                    'chem_comp_ID': [],
                    'X_shift': [],
                    'Y_shift': [],
                    'X_atom_name': [],
                    'Y_atom_name': []}
        for i in range(len(peak_list[0])):
            # info string layout: ...-...-...-<seq>-<residue>-<atom_x>-<atom_y>
            tags = peak_list[3][i].split("-")
            atom_x = tags[5]
            atom_y = tags[6]
            if include_side_chain or (atom_x in back_bone and atom_y in back_bone):
                csv_dict['sequence'].append(tags[3])
                csv_dict['chem_comp_ID'].append(tags[4])
                csv_dict['X_shift'].append(peak_list[0][i])
                csv_dict['Y_shift'].append(peak_list[1][i])
                csv_dict['X_atom_name'].append(atom_x)
                csv_dict['Y_atom_name'].append(atom_y)
        if output_file_name is not None:
            # context manager guarantees the file is closed even on error
            with open(output_file_name, 'w') as fo:
                fo.write('sequence,chem_comp_ID,X_shift,Y_shift,X_atom_name,Y_atom_name\n')
                for i in range(len(csv_dict['sequence'])):
                    fo.write('{},{},{},{},{},{}\n'.format(csv_dict['sequence'][i],
                                                          csv_dict['chem_comp_ID'][i],
                                                          round(float(csv_dict['X_shift'][i]), 3),
                                                          round(float(csv_dict['Y_shift'][i]), 3),
                                                          csv_dict['X_atom_name'][i],
                                                          csv_dict['Y_atom_name'][i]))
    elif output_format == 'sparky':
        csv_dict = {'Assignment': [],
                    'w1': [],
                    'w2': []}
        for i in range(len(peak_list[0])):
            tags = peak_list[3][i].split("-")
            atom_x = tags[5]
            atom_y = tags[6]
            res = tags[4]
            # Sparky assignment: <one-letter residue><seq><atom_x>-<atom_y>;
            # unknown residue codes fall back to 'X'.
            try:
                one_letter = ChemicalShiftStatistics.one_letter_code[res]
            except KeyError:
                one_letter = 'X'
            assignment = '{}{}{}-{}'.format(one_letter, tags[3], atom_x, atom_y)
            if include_side_chain or (atom_x in back_bone and atom_y in back_bone):
                csv_dict['Assignment'].append(assignment)
                csv_dict['w1'].append(peak_list[0][i])
                csv_dict['w2'].append(peak_list[1][i])
        if output_file_name is not None:
            with open(output_file_name, 'w') as fo:
                fo.write('Assignment \t{:>6}\t\t{:>6}\n\n'.format('w1', 'w2'))
                for i in range(len(csv_dict['Assignment'])):
                    fo.write('{:<10}\t\t{:>6}\t\t{:>6}\n'.format(csv_dict['Assignment'][i],
                                                                 round(float(csv_dict['w1'][i]), 3),
                                                                 round(float(csv_dict['w2'][i]), 3)))
    else:
        logging.error('Output format not supported')
        raise ValueError('Output format not supported')
    return csv_dict
#
# if __name__ == "__main__":
#
# # The following script is to generate example figures for readthedocs
# n15hsqc(bmrb_ids=[17074,17076,17077],legend='dataset',output_format='jpg',
# output_file='../docs/_images/sample_n15hsqc.jpg',show_visualization=False,
# draw_trace=True)
# n15hsqc(bmrb_ids=[17074, 17076, 17077], legend='dataset', output_format='html',
# output_file='../docs/_static/sample_n15hsqc.html', show_visualization=False,
# draw_trace=True)
# c13hsqc(bmrb_ids=15060,legend='residue',output_file='../docs/_images/sample_c13hsqc.jpg',
# output_format='jpg',show_visualization=False)
# c13hsqc(bmrb_ids=15060, legend='residue', output_file='../docs/_static/sample_c13hsqc.html',
# output_format='html', show_visualization=False)
# n15hsqc(bmrb_ids=[17076, 17077], input_file_names='tests/test_data/MyData.str',
# legend='dataset',output_format='jpg',
# output_file='../docs/_images/quick_start_n15hsqc_compare.jpg',
# show_visualization=False)
# n15hsqc(bmrb_ids=[17076, 17077], input_file_names='tests/test_data/MyData.str',
# legend='dataset', output_format='html',
# output_file='../docs/_static/quick_start_n15hsqc_compare.html',
# show_visualization=False)
# n15hsqc(bmrb_ids=[17076, 17077], input_file_names='tests/test_data/MyData.str',
# legend='dataset', output_format='html',draw_trace=True,
# output_file='../docs/_static/quick_start_n15hsqc_compare2.html',
# show_visualization=False)
# n15hsqc(bmrb_ids=[17076, 17077], peak_list='tests/test_data/test_peak_list.csv',
# legend='dataset', output_format='jpg',
# output_file='../docs/_images/quick_start_n15_peaklist.jpg',
# show_visualization=False)
# n15hsqc(bmrb_ids=15060,
# legend='residue',
# output_format='jpg',
# output_file='../docs/_images/example1.jpg',
# show_visualization=False)
# n15hsqc(bmrb_ids=15060,
# legend='residue',
# output_format='html',
# output_file='../docs/_static/example1.html',
# show_visualization=False)
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# legend='dataset',
# output_format='jpg',
# output_file='../docs/_images/example2.jpg',
# show_visualization=False
# )
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# legend='dataset',
# output_format='html',
# output_file='../docs/_static/example2.html',
# show_visualization=False
# )
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# peak_list='tests/test_data/test_peak_list.csv',
# legend='dataset',
# output_format='jpg',
# output_file='../docs/_images/example3.jpg',
# show_visualization=False
# )
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# peak_list='tests/test_data/test_peak_list.csv',
# legend='dataset',
# output_format='html',
# output_file='../docs/_static/example3.html',
# show_visualization=False
# )
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# legend='dataset',
# draw_trace=True,
# output_format='jpg',
# output_file='../docs/_images/example4.jpg',
# show_visualization=False
# )
# n15hsqc(bmrb_ids=[17076, 17077],
# input_file_names='tests/test_data/MyData.str',
# legend='dataset',
# draw_trace=True,
# output_format='html',
# output_file='../docs/_static/example4.html',
# show_visualization=False
# )
# c13hsqc(bmrb_ids=15060,
# legend='residue',
# output_format='jpg',
# output_file='../docs/_images/example5.jpg',
# show_visualization=False)
# c13hsqc(bmrb_ids=15060,
# legend='residue',
# output_format='html',
# output_file='../docs/_static/example5.html',
# show_visualization=False)
# c13hsqc(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# output_format='jpg',
# output_file='../docs/_images/example6.jpg',
# show_visualization=False
# )
# c13hsqc(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# output_format='html',
# output_file='../docs/_static/example6.html',
# show_visualization=False
# )
# c13hsqc(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# draw_trace=True,
# output_format='jpg',
# output_file='../docs/_images/example7.jpg',
# show_visualization=False
# )
# c13hsqc(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# draw_trace=True,
# output_format='html',
# output_file='../docs/_static/example7.html',
# show_visualization=False
# )
# tocsy(bmrb_ids=15060,
# legend='residue',
# output_format='jpg',
# output_file='../docs/_images/example8.jpg',
# show_visualization=False)
# tocsy(bmrb_ids=15060,
# legend='residue',
# output_format='html',
# output_file='../docs/_static/example8.html',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# output_format='jpg',
# output_file='../docs/_images/example9.jpg',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# output_format='html',
# output_file='../docs/_static/example9.html',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='residue',
# output_format='jpg',
# output_file='../docs/_images/example10.jpg',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='residue',
# output_format='html',
# output_file='../docs/_static/example10.html',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# draw_trace=True,
# output_format='jpg',
# output_file='../docs/_images/example11.jpg',
# show_visualization=False)
# tocsy(bmrb_ids=[17074, 17076, 17077],
# legend='dataset',
# draw_trace=True,
# output_format='html',
# output_file='../docs/_static/example11.html',
# show_visualization=False)
# generic_2d(bmrb_ids=15060,
# atom_x='N',
# atom_y='CB',
# legend='residue',
# output_format='jpg',
# output_file='../docs/_images/example12.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15060,
# atom_x='N',
# atom_y='CB',
# legend='residue',
# output_format='html',
# output_file='../docs/_static/example12.html',
# show_visualization=False)
# generic_2d(bmrb_ids=[17074,17076,17077],
# atom_x='N',
# atom_y='CB',
# legend='dataset',
# output_format='jpg',
# output_file='../docs/_images/example13.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=[17074,17076,17077],
# atom_x='N',
# atom_y='CB',
# legend='dataset',
# output_format='html',
# output_file='../docs/_static/example13.html',
# show_visualization=False)
# generic_2d(bmrb_ids=[17074, 17076, 17077],
# atom_x='N',
# atom_y='CB',
# legend='dataset',
# draw_trace=True,
# output_format='jpg',
# output_file='../docs/_images/example14.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=[17074, 17076, 17077],
# atom_x='N',
# atom_y='CB',
# legend='dataset',
# draw_trace=True,
# output_format='html',
# output_file='../docs/_static/example14.html',
# show_visualization=False)
#
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_preceding=True,
# output_format='jpg',
# output_file='../docs/_images/example15.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_preceding=True,
# output_format='html',
# output_file='../docs/_static/example15.html',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_preceding=True,
# seq_walk=True,
# output_format='jpg',
# output_file='../docs/_images/example16.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_preceding=True,
# seq_walk=True,
# output_format='html',
# output_file='../docs/_static/example16.html',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# output_format='jpg',
# output_file='../docs/_images/example17.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# output_format='html',
# output_file='../docs/_static/example17.html',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# seq_walk=True,
# output_format='jpg',
# output_file='../docs/_images/example18.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# seq_walk=True,
# output_format='html',
# output_file='../docs/_static/example18.html',
# show_visualization=False)
#
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# full_walk=True,
# output_format='jpg',
# output_file='../docs/_images/example19.jpg',
# show_visualization=False)
# generic_2d(bmrb_ids=15000,
# atom_x='N',
# atom_y='CA',
# legend='residue',
# include_next=True,
# full_walk=True,
# output_format='html',
# output_file='../docs/_static/example19.html',
# show_visualization=False)
#
| 53.002502
| 119
| 0.537374
| 10,502
| 84,751
| 4.141497
| 0.041706
| 0.022256
| 0.027038
| 0.013519
| 0.920817
| 0.912057
| 0.902653
| 0.89794
| 0.893526
| 0.885249
| 0
| 0.024778
| 0.345223
| 84,751
| 1,598
| 120
| 53.03567
| 0.758997
| 0.355441
| 0
| 0.798097
| 0
| 0
| 0.109258
| 0.007772
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010571
| false
| 0.002114
| 0.006342
| 0
| 0.027484
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
622f64bd979697518c0da8793b5d41153355f353
| 12,781
|
py
|
Python
|
test/gst-vaapi/encode/hevc.py
|
wangzj0601/vaapi-fits
|
df3188dc6624492ba941e4d0e74c2a0ffdf36759
|
[
"BSD-3-Clause"
] | null | null | null |
test/gst-vaapi/encode/hevc.py
|
wangzj0601/vaapi-fits
|
df3188dc6624492ba941e4d0e74c2a0ffdf36759
|
[
"BSD-3-Clause"
] | null | null | null |
test/gst-vaapi/encode/hevc.py
|
wangzj0601/vaapi-fits
|
df3188dc6624492ba941e4d0e74c2a0ffdf36759
|
[
"BSD-3-Clause"
] | null | null | null |
###
### Copyright (C) 2018-2019 Intel Corporation
###
### SPDX-License-Identifier: BSD-3-Clause
###
from ....lib import *
from ..util import *
# Test-case specifications for HEVC encode, one table per bit depth.
spec8 = load_test_spec("hevc", "encode", "8bit")
spec10 = load_test_spec("hevc", "encode", "10bit")
def check_psnr(params):
  """Decode params["encoded"] with vaapih265dec and check PSNR vs the source.

  Expects params to carry at least: encoded, decoded, source, width, height,
  frames and format.  An optional "refctx" entry is forwarded as the baseline
  context.
  """
  # Decode the bitstream and dump raw frames to params["decoded"] in the
  # mapped format of the original source.
  call(
    "gst-launch-1.0 -vf filesrc location={encoded}"
    " ! h265parse ! vaapih265dec"
    " ! videoconvert ! video/x-raw,format={mformatu}"
    " ! checksumsink2 file-checksum=false frame-checksum=false"
    " plane-checksum=false dump-output=true qos=false"
    " dump-location={decoded}".format(
      mformatu = mapformatu(params["format"]), **params
    )
  )
  # Compare decoded output against the original raw clip and record the
  # PSNR result against the stored baseline.
  get_media().baseline.check_psnr(
    psnr = calculate_psnr(
      params["source"], params["decoded"],
      params["width"], params["height"],
      params["frames"], params["format"]),
    context = params.get("refctx", []),
  )
def check_bitrate_vbr(params):
  """Verify the measured VBR bitrate falls inside the tolerated window."""
  # kbit/s actually produced by the encoder for this clip
  nbytes = os.path.getsize(params["encoded"])
  measured = nbytes * 8 * params["fps"] / 1024.0 / params["frames"]
  get_media()._set_test_details(
    size_encoded = nbytes,
    bitrate_actual = "{:-.2f}".format(measured))
  # tolerate a 25% undershoot of minrate and a 10% overshoot of maxrate
  lower = params["minrate"] * 0.75
  upper = params["maxrate"] * 1.10
  assert lower <= measured <= upper
#-------------------------------------------------#
#--------------------- CQP 8 ---------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_cqp_parameters(spec8, ['main']))
@platform_tags(HEVC_ENCODE_8BIT_PLATFORMS)
def test_8bit_cqp(case, gop, slices, bframes, qp, quality, profile):
  """CQP-mode HEVC encode of an 8-bit clip, validated by PSNR."""
  params = spec8[case].copy()
  mprofile = mapprofile("hevc-8", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  params.update(
    gop = gop, slices = slices, bframes = bframes, qp = qp, quality = quality,
    profile = mprofile, mformat = mapformat(params["format"]))
  # Artifact names embed every knob so distinct parameter combinations get
  # distinct files.
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{qp}-{quality}-{profile}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{qp}-{quality}-{profile}-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # Raw source -> NV12 -> vaapih265enc (CQP) -> elementary .h265 stream.
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " ! videoconvert ! video/x-raw,format=NV12"
    " ! vaapih265enc rate-control=cqp init-qp={qp} quality-level={quality}"
    " keyframe-period={gop} num-slices={slices} max-bframes={bframes}"
    " ! video/x-h265,profile={profile} ! h265parse"
    " ! filesink location={encoded}".format(**params))
  check_psnr(params)
#-------------------------------------------------#
#--------------------- CBR 8 ---------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_cbr_parameters(spec8, ['main']))
@platform_tags(HEVC_ENCODE_8BIT_PLATFORMS)
def test_8bit_cbr(case, gop, slices, bframes, bitrate, fps, profile):
  """CBR-mode HEVC encode of an 8-bit clip; checks bitrate accuracy and PSNR."""
  params = spec8[case].copy()
  mprofile = mapprofile("hevc-8", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  params.update(
    gop = gop, slices = slices, bframes = bframes, bitrate = bitrate,
    profile = mprofile, fps = fps, mformat = mapformat(params["format"]))
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # Raw source -> NV12 -> vaapih265enc (CBR at the requested bitrate).
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " framerate={fps} ! videoconvert ! video/x-raw,format=NV12"
    " ! vaapih265enc rate-control=cbr bitrate={bitrate} keyframe-period={gop}"
    " num-slices={slices} max-bframes={bframes}"
    " ! video/x-h265,profile={profile} ! h265parse"
    " ! filesink location={encoded}".format(**params))
  # calculate actual bitrate
  encsize = os.path.getsize(params["encoded"])
  bitrate_actual = encsize * 8 * params["fps"] / 1024.0 / params["frames"]
  bitrate_gap = abs(bitrate_actual - bitrate) / bitrate
  get_media()._set_test_details(
    size_encoded = encsize,
    bitrate_actual = "{:-.2f}".format(bitrate_actual),
    bitrate_gap = "{:.2%}".format(bitrate_gap))
  # CBR output must land within 10% of the requested bitrate.
  assert(bitrate_gap <= 0.10)
  check_psnr(params)
#-------------------------------------------------#
#--------------------- VBR 8 ---------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_vbr_parameters(spec8, ['main']))
@platform_tags(HEVC_ENCODE_8BIT_PLATFORMS)
def test_8bit_vbr(case, gop, slices, bframes, bitrate, fps, quality, refs, profile):
  """VBR-mode HEVC encode of an 8-bit clip; checks bitrate window and PSNR."""
  params = spec8[case].copy()
  mprofile = mapprofile("hevc-8", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  # target percentage 70% (hard-coded in gst-vaapi)
  # gst-vaapi sets max-bitrate = bitrate and min-bitrate = bitrate * 0.70
  minrate = bitrate
  maxrate = int(bitrate / 0.7)
  params.update(
    gop = gop, slices = slices, bframes = bframes, bitrate = bitrate,
    profile = mprofile, fps = fps, quality = quality, refs = refs,
    minrate = minrate, maxrate = maxrate, mformat = mapformat(params["format"]))
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{quality}-{refs}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{quality}-{refs}"
    "-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # Note: the encoder element is given maxrate, since gst-vaapi derives the
  # min-bitrate internally (see comment above).
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " framerate={fps} ! videoconvert ! video/x-raw,format=NV12"
    " ! vaapih265enc rate-control=vbr bitrate={maxrate} keyframe-period={gop}"
    " num-slices={slices} max-bframes={bframes} refs={refs}"
    " quality-level={quality} ! video/x-h265,profile={profile} ! h265parse"
    " ! filesink location={encoded}".format(**params))
  check_bitrate_vbr(params)
  check_psnr(params)
#-------------------------------------------------#
#--------------------- CQP 10 --------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_cqp_parameters(spec10, ['main10']))
@platform_tags(HEVC_ENCODE_10BIT_PLATFORMS)
def test_10bit_cqp(case, gop, slices, bframes, qp, quality, profile):
  """CQP-mode HEVC encode of a 10-bit clip, validated by PSNR."""
  params = spec10[case].copy()
  mprofile = mapprofile("hevc-10", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  params.update(
    gop = gop, slices = slices, bframes = bframes, qp = qp, quality = quality,
    profile = mprofile, mformat = mapformat(params["format"]))
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{qp}-{quality}-{profile}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{qp}-{quality}-{profile}-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # NOTE(review): unlike the 8-bit path there is no videoconvert/NV12 caps
  # here -- presumably the 10-bit source format is fed to the encoder
  # directly; confirm against the driver's supported input formats.
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " ! vaapih265enc rate-control=cqp init-qp={qp} quality-level={quality}"
    " keyframe-period={gop} num-slices={slices} max-bframes={bframes}"
    " ! video/x-h265,profile={profile} ! h265parse"
    " ! filesink location={encoded}".format(**params))
  check_psnr(params)
#-------------------------------------------------#
#--------------------- CBR 10 --------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_cbr_parameters(spec10, ['main10']))
@platform_tags(HEVC_ENCODE_10BIT_PLATFORMS)
def test_10bit_cbr(case, gop, slices, bframes, bitrate, fps, profile):
  """CBR-mode HEVC encode of a 10-bit clip; checks bitrate accuracy and PSNR."""
  params = spec10[case].copy()
  mprofile = mapprofile("hevc-10", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  params.update(
    gop = gop, slices = slices, bframes = bframes, bitrate = bitrate,
    profile = mprofile, fps = fps, mformat = mapformat(params["format"]))
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # Raw 10-bit source fed straight to vaapih265enc (CBR); no videoconvert.
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " framerate={fps} ! vaapih265enc rate-control=cbr bitrate={bitrate}"
    " keyframe-period={gop} num-slices={slices} max-bframes={bframes}"
    " ! video/x-h265,profile={profile} ! h265parse"
    " ! filesink location={encoded}".format(**params))
  # calculate actual bitrate
  encsize = os.path.getsize(params["encoded"])
  bitrate_actual = encsize * 8 * params["fps"] / 1024.0 / params["frames"]
  bitrate_gap = abs(bitrate_actual - bitrate) / bitrate
  get_media()._set_test_details(
    size_encoded = encsize,
    bitrate_actual = "{:-.2f}".format(bitrate_actual),
    bitrate_gap = "{:.2%}".format(bitrate_gap))
  # CBR output must land within 10% of the requested bitrate.
  assert(bitrate_gap <= 0.10)
  check_psnr(params)
#-------------------------------------------------#
#--------------------- VBR 10 --------------------#
#-------------------------------------------------#
@slash.requires(have_gst)
@slash.requires(*have_gst_element("vaapi"))
@slash.requires(*have_gst_element("vaapih265enc"))
@slash.requires(*have_gst_element("vaapih265dec"))
@slash.requires(*have_gst_element("checksumsink2"))
@slash.parametrize(*gen_hevc_vbr_parameters(spec10, ['main10']))
@platform_tags(HEVC_ENCODE_10BIT_PLATFORMS)
def test_10bit_vbr(case, gop, slices, bframes, bitrate, fps, quality, refs, profile):
  # Encode a 10-bit raw stream with vaapih265enc in VBR mode, then verify the
  # achieved bitrate (check_bitrate_vbr) and decode quality (check_psnr).
  params = spec10[case].copy()
  # Map the caselist profile name onto the gst-vaapi profile string; skip when
  # there is no hevc-10 mapping for it.
  mprofile = mapprofile("hevc-10", profile)
  if mprofile is None:
    slash.skip_test("{} profile is not supported".format(profile))
  # gst-vaapi hard-codes a 70% target percentage: it derives its internal
  # min-bitrate as 0.70 * <encoder bitrate>.  The encoder is therefore driven
  # with bitrate = maxrate = target / 0.7 so that the internal min-bitrate
  # lands on the requested target (minrate == requested bitrate).
  minrate = bitrate
  maxrate = int(bitrate / 0.7)
  params.update(
    gop = gop, slices = slices, bframes = bframes, bitrate = bitrate,
    profile = mprofile, fps = fps, quality = quality, refs = refs,
    minrate = minrate, maxrate = maxrate, mformat = mapformat(params["format"]))
  # Artifact names encode every encode parameter for traceability.
  params["encoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{quality}-{refs}"
    ".h265".format(case, **params))
  params["decoded"] = get_media()._test_artifact(
    "{}-{gop}-{slices}-{bframes}-{bitrate}-{profile}-{fps}-{quality}-{refs}"
    "-{width}x{height}-{format}"
    ".yuv".format(case, **params))
  # Raw source -> vaapih265enc (VBR, driven at maxrate) -> h265 stream on disk.
  call(
    "gst-launch-1.0 -vf filesrc location={source} num-buffers={frames}"
    " ! rawvideoparse format={mformat} width={width} height={height}"
    " framerate={fps} ! vaapih265enc rate-control=vbr bitrate={maxrate}"
    " keyframe-period={gop} num-slices={slices} max-bframes={bframes}"
    " refs={refs} quality-level={quality} ! video/x-h265,profile={profile}"
    " ! h265parse ! filesink location={encoded}".format(**params))
  check_bitrate_vbr(params)
  check_psnr(params)
| 40.318612
| 85
| 0.640873
| 1,470
| 12,781
| 5.432653
| 0.106803
| 0.048835
| 0.063862
| 0.075131
| 0.928876
| 0.919985
| 0.919985
| 0.919985
| 0.919985
| 0.895066
| 0
| 0.024007
| 0.123308
| 12,781
| 316
| 86
| 40.446203
| 0.68871
| 0.104217
| 0
| 0.801724
| 0
| 0.021552
| 0.362806
| 0.131794
| 0
| 0
| 0
| 0
| 0.012931
| 1
| 0.034483
| false
| 0
| 0.008621
| 0
| 0.043103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6234b7d963c205902ef642a1c8dce294cc61a8a0
| 27,097
|
py
|
Python
|
logdiv/divanalysis/temporal.py
|
pedroramaciotti/diversity-patterns
|
82b48d87b9e8d35e6651243d3cbff0400ec9a58a
|
[
"MIT"
] | null | null | null |
logdiv/divanalysis/temporal.py
|
pedroramaciotti/diversity-patterns
|
82b48d87b9e8d35e6651243d3cbff0400ec9a58a
|
[
"MIT"
] | null | null | null |
logdiv/divanalysis/temporal.py
|
pedroramaciotti/diversity-patterns
|
82b48d87b9e8d35e6651243d3cbff0400ec9a58a
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import time as timelib
import matplotlib.pyplot as plt
# local modules
from . import function
def temporal_analysis(weblog, session_data, classification_column,
                      temporal_analysis_weblog_start, temporal_analysis_weblog_end,
                      group_names, weblog_column_dict, micd = False, verbose = False):
    """
    Calculate temporal (each hour) number of requests, entropy consummed,
    entropy offered and Mean Individual Consummed Diversity along the groups
    specified in "group_names".

    Parameters
    ----------
    weblog: pandas dataframe of requests
    session_data: pandas dataframe of sessions (one row per session_id); each
        name in group_names must be a boolean column of this frame
    classification_column: classification to analyse; the weblog is expected to
        carry a 'requested_<classification_column>' column and, when micd is
        True, session_data a '<classification_column>_entropy' column
    temporal_analysis_weblog_start: start timestamp
    temporal_analysis_weblog_end: end timestamp
    group_names: list of string
    weblog_column_dict: dict mapping 'timestamp_column' and
        'requested_page_column' to the actual weblog column names
    micd: bool, also compute the Mean Individual Consumed Diversity columns
    verbose: bool, print timing information

    Returns
    -------
    Pandas dataframe with one row per hour of the analysed period
    """
    if verbose == True:
        start_time_tot = timelib.time()
        print("\n * Computing temporal analysis ...")
    temporal_analysis_weblog_start = pd.Timestamp(temporal_analysis_weblog_start)
    temporal_analysis_weblog_end = pd.Timestamp(temporal_analysis_weblog_end)
    # Restrict the weblog to the requested time window (strict bounds)
    t_weblog = weblog[weblog[weblog_column_dict['timestamp_column']] < temporal_analysis_weblog_end]
    t_weblog = t_weblog[t_weblog[weblog_column_dict['timestamp_column']] > temporal_analysis_weblog_start]
    start_day = pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].min()).day
    end_day = pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].max()).day
    # List of calendar days covered, handling a month boundary
    if start_day > end_day:
        t_days = list(range(start_day, int(temporal_analysis_weblog_start.days_in_month) + 1)) + list(range(1, end_day + 1))
    else:
        t_days = list(range(int(start_day), int(end_day) + 1))
    t_hours = list(range(0, 24))
    t_activity_columns = ['t_activity_' + group_name for group_name in group_names]
    t_consumed_diversity_columns = ['t_consumed_diversity_' + group_name for group_name in group_names]
    t_offered_diversity_columns = ['t_offered_diversity_' + group_name for group_name in group_names]
    base_columns = ['start_time', 'end_time', 't_activity_total',
                    't_consumed_diversity_total', 't_offered_diversity_total']
    if micd:
        t_mean_ind_cons_div_columns = ['t_mean_ind_cons_div_' + group_name for group_name in group_names]
        column_names = base_columns + ['t_mean_ind_cons_div_total'] \
            + t_activity_columns + t_consumed_diversity_columns \
            + t_offered_diversity_columns + t_mean_ind_cons_div_columns
    else:
        column_names = base_columns \
            + t_activity_columns + t_consumed_diversity_columns + t_offered_diversity_columns
    timeseries_data = pd.DataFrame(columns=column_names)
    for columns in timeseries_data:
        timeseries_data[columns] = np.zeros(len(t_days) * len(t_hours))
    counter = 0
    year = temporal_analysis_weblog_start.year
    month = temporal_analysis_weblog_start.month
    yesterday = temporal_analysis_weblog_start.day
    # tqdm is optional and only provides a progress bar.  The previous version
    # duplicated the whole day/hour loop inside the except branch; now only the
    # import itself is guarded, so an ImportError raised from the loop body can
    # no longer silently restart the computation with stale counters.
    try:
        import tqdm
        day_iterator = tqdm.tqdm(t_days)
    except ImportError:
        day_iterator = t_days
    for day in day_iterator:
        # Detect wrap-around into the next month/year
        if day < yesterday: month += 1
        if month > 12:
            month = 1
            year += 1
        for hour in t_hours:
            start_time = pd.Timestamp('%d-%d-%d %s:00:00' % (year, month, day, function.zf(hour)))
            end_time = pd.Timestamp('%d-%d-%d %s:59:59' % (year, month, day, function.zf(hour)))
            # NOTE(review): chained assignment (df[col][i] = ...) relies on
            # legacy pandas behaviour (no-op under copy-on-write); kept as-is
            # to preserve the original semantics.
            timeseries_data['start_time'][counter] = start_time
            timeseries_data['end_time'][counter] = end_time
            hour_weblog = t_weblog[t_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x)) > start_time]
            hour_weblog = hour_weblog[hour_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x)) < end_time]
            hour_session = session_data[session_data.session_id.isin(hour_weblog.session_id.unique())]
            # Totals over every request of this hour
            timeseries_data['t_activity_total'][counter] = hour_weblog.shape[0]
            if hour_weblog.shape[0] > 0:
                pa_consumed, aux = function.proportional_abundance(hour_weblog, 'requested_' + classification_column)
                # Offered diversity is computed on distinct requested pages only
                pa_offered, aux = function.proportional_abundance(
                    hour_weblog.drop_duplicates(subset=weblog_column_dict['requested_page_column']),
                    'requested_' + classification_column)
                timeseries_data['t_consumed_diversity_total'][counter] = function.ShannonEntropy(pa_consumed)
                timeseries_data['t_offered_diversity_total'][counter] = function.ShannonEntropy(pa_offered)
                if micd: timeseries_data['t_mean_ind_cons_div_total'][counter] = hour_session[classification_column + '_entropy'].mean()
            # Same quantities restricted to each session group
            for group_name in group_names:
                list_sessions = session_data[session_data[group_name]].session_id.values
                weblog_tmp = hour_weblog[hour_weblog.session_id.isin(list_sessions)]
                session_tmp = hour_session[hour_session.session_id.isin(list_sessions)]
                timeseries_data['t_activity_' + group_name][counter] = weblog_tmp.shape[0]
                if weblog_tmp.shape[0] > 0:
                    pa_consumed, aux = function.proportional_abundance(weblog_tmp, 'requested_' + classification_column)
                    pa_offered, aux = function.proportional_abundance(
                        weblog_tmp.drop_duplicates(subset=weblog_column_dict['requested_page_column']),
                        'requested_' + classification_column)
                    timeseries_data['t_consumed_diversity_' + group_name][counter] = function.ShannonEntropy(pa_consumed)
                    timeseries_data['t_offered_diversity_' + group_name][counter] = function.ShannonEntropy(pa_offered)
                    if micd: timeseries_data['t_mean_ind_cons_div_' + group_name][counter] = session_tmp[classification_column + '_entropy'].mean()
            counter += 1
            del hour_weblog
        yesterday = day
    if verbose == True:
        print("     Temporal analysis computed in %.1f seconds." % (timelib.time() - start_time_tot))
    return timeseries_data
def temporal_analysis_article(weblog, classification_column_diversity, classification_column_transaction, transaction,temporal_analysis_weblog_start, temporal_analysis_weblog_end, weblog_column_dict,verbose = False):
    """
    Calculate temporal (each 6 hours) number of requests article -> article and
    number of requests article -> article that have changed classification.

    Each 6-hour window's two counts are replicated over that window's 6 hourly
    rows, so the returned frame has one row per hour and is directly
    comparable with the hourly frames produced by temporal_analysis().

    Parameters
    ----------
    weblog: pandas dataframe of requests
    classification_column_diversity: classification used to detect a class
        change between the referrer and the requested page
    classification_column_transaction: classification used to select the
        transaction (requested AND referrer must equal `transaction`)
    transaction: string, belonging to the items of classification_column_transaction
    temporal_analysis_weblog_start: start timestamp
    temporal_analysis_weblog_end: end timestamp
    weblog_column_dict: dict with at least a 'timestamp_column' entry
    verbose: bool, print timing information

    Returns
    -------
    Pandas dataframe
    """
    if verbose== True:
        start_time_tot = timelib.time()
        print("\n * Computing temporal analysis on number of article ...")
    temporal_analysis_weblog_start = pd.Timestamp(temporal_analysis_weblog_start)
    temporal_analysis_weblog_end = pd.Timestamp(temporal_analysis_weblog_end)
    # Restrict the weblog to the analysis window (strict bounds)
    t_weblog=weblog[weblog[weblog_column_dict['timestamp_column']]<temporal_analysis_weblog_end]
    t_weblog=t_weblog[t_weblog[weblog_column_dict['timestamp_column']]>temporal_analysis_weblog_start]
    start_day=pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].min()).day
    end_day=pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].max()).day
    # Days covered by the data, handling a month boundary
    if start_day > end_day:
        t_days = list(range(start_day,int(temporal_analysis_weblog_start.days_in_month)+1)) + list(range(1,end_day+1))
    else :
        t_days = list(range(int(start_day),int(end_day)+1))
    # 24 hourly rows per day (windows below replicate their value 6 times)
    t_hours=list(range(0,24))
    column_names = ['start_time','end_time','t_activity_article','t_activity_article_change_class']
    timeseries_data=pd.DataFrame(columns=column_names)
    for columns in timeseries_data:
        timeseries_data[columns] = np.zeros(len(t_days)*len(t_hours))
    counter=0
    year = temporal_analysis_weblog_start.year
    month = temporal_analysis_weblog_start.month
    yesterday = temporal_analysis_weblog_start.day
    for day in t_days:
        # Detect wrap-around into the next month/year
        if day < yesterday: month += 1
        if month > 12:
            month = 1
            year += 1
        hour = 0
        # Four 6-hour windows per day: [0-5], [6-11], [12-17], [18-23]
        for i in range(4):
            # function.zf presumably zero-pads the hour -- TODO confirm.
            # The end hour is start hour + 5, so the window spans
            # hh:00:00 .. (hh+5):59:59.
            start_time=pd.Timestamp('%d-%d-%d %s:00:00'%(year,month,day,function.zf(hour)))
            end_time=pd.Timestamp('%d-%d-%d %s:59:59'%(year,month,day,str(int(function.zf(hour))+5)))
            # NOTE(review): chained slice assignment (df[col][i:j] = ...) relies
            # on legacy pandas behaviour; a no-op under copy-on-write.
            timeseries_data['start_time'][counter:counter+6] = start_time
            timeseries_data['end_time'][counter:counter+6] = end_time
            hour_weblog=t_weblog[t_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x))>start_time]
            hour_weblog=hour_weblog[hour_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x))<end_time]
            # Keep only transaction -> transaction requests (e.g. article -> article)
            hour_weblog=hour_weblog[(hour_weblog['requested_'+classification_column_transaction] == transaction) & (hour_weblog['referrer_'+classification_column_transaction] == transaction)]
            # number of requests article -> article for this window
            timeseries_data['t_activity_article'][counter:counter+6]=hour_weblog.shape[0]
            hour_weblog=hour_weblog[hour_weblog['requested_'+classification_column_diversity] != hour_weblog['referrer_'+classification_column_diversity]]
            # ... of which the diversity classification changed between referrer and requested page
            timeseries_data['t_activity_article_change_class'][counter:counter+6] = hour_weblog.shape[0]
            # Advance to the next 6-hour window
            counter+=6
            hour+=6
            del hour_weblog
        yesterday = day
    if verbose == True:
        print("     Temporal analysis on number of article computed in %.1f seconds."%(timelib.time() - start_time_tot))
    return timeseries_data;
def temporal_analysis_article_day(weblog,classification_column_diversity, classification_column_transaction, transaction, temporal_analysis_weblog_start, temporal_analysis_weblog_end, weblog_column_dict,verbose = False):
    """
    Calculate temporal (each day) number of requests article -> article and
    number of requests article -> article that have changed class.

    Each day's two counts are replicated over the 24 hourly rows of that day,
    so the returned frame has one row per hour and is directly comparable with
    the hourly frames produced by temporal_analysis().

    Parameters
    ----------
    weblog: pandas dataframe of requests
    classification_column_diversity: classification used to detect a class
        change between the referrer and the requested page
    classification_column_transaction: classification used to select the
        transaction (requested AND referrer must equal `transaction`)
    transaction: string, belonging to the items of classification_column_transaction
    temporal_analysis_weblog_start: start timestamp
    temporal_analysis_weblog_end: end timestamp
    weblog_column_dict: dict with at least a 'timestamp_column' entry
    verbose: bool, print timing information

    Returns
    -------
    Pandas dataframe with columns start_time, end_time, t_activity_article,
    t_activity_article_change_class
    """
    if verbose == True:
        start_time_tot = timelib.time()
        print("\n * Computing temporal analysis on number of article ...")
    temporal_analysis_weblog_start = pd.Timestamp(temporal_analysis_weblog_start)
    temporal_analysis_weblog_end = pd.Timestamp(temporal_analysis_weblog_end)
    # Restrict the weblog to the analysis window (strict bounds)
    t_weblog = weblog[weblog[weblog_column_dict['timestamp_column']] < temporal_analysis_weblog_end]
    t_weblog = t_weblog[t_weblog[weblog_column_dict['timestamp_column']] > temporal_analysis_weblog_start]
    start_day = pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].min()).day
    end_day = pd.Timestamp(t_weblog[weblog_column_dict['timestamp_column']].max()).day
    # Days covered by the data, handling a month boundary
    if start_day > end_day:
        t_days = list(range(start_day, int(temporal_analysis_weblog_start.days_in_month) + 1)) + list(range(1, end_day + 1))
    else:
        t_days = list(range(int(start_day), int(end_day) + 1))
    t_hours = list(range(0, 24))
    n_rows = len(t_days) * len(t_hours)
    # start_time/end_time are object columns so they can reliably hold
    # Timestamps; the counters stay float for backward compatibility with the
    # previous np.zeros initialisation.
    timeseries_data = pd.DataFrame({
        'start_time': pd.Series([pd.NaT] * n_rows, dtype=object),
        'end_time': pd.Series([pd.NaT] * n_rows, dtype=object),
        't_activity_article': np.zeros(n_rows),
        't_activity_article_change_class': np.zeros(n_rows),
    })
    counter = 0
    year = temporal_analysis_weblog_start.year
    month = temporal_analysis_weblog_start.month
    yesterday = temporal_analysis_weblog_start.day
    for day in t_days:
        # Detect wrap-around into the next month/year
        if day < yesterday: month += 1
        if month > 12:
            month = 1
            year += 1
        start_time = pd.Timestamp('%d-%d-%d 00:00:00' % (year, month, day))
        end_time = pd.Timestamp('%d-%d-%d 23:59:59' % (year, month, day))
        # .loc label slices are inclusive: counter..counter+23 covers the 24
        # hourly rows of this day (replaces legacy chained slice assignment,
        # which is a silent no-op under pandas copy-on-write).
        timeseries_data.loc[counter:counter + 23, 'start_time'] = start_time
        timeseries_data.loc[counter:counter + 23, 'end_time'] = end_time
        day_weblog = t_weblog[t_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x)) > start_time]
        day_weblog = day_weblog[day_weblog[weblog_column_dict['timestamp_column']].apply(lambda x: pd.Timestamp(x)) < end_time]
        # Keep only transaction -> transaction requests (e.g. article -> article).
        # Bug fix: the transaction is selected on classification_column_transaction,
        # matching temporal_analysis_article(); the old code filtered on
        # classification_column_diversity, which also made the change-class
        # count below always zero.
        day_weblog = day_weblog[(day_weblog['requested_' + classification_column_transaction] == transaction)
                                & (day_weblog['referrer_' + classification_column_transaction] == transaction)]
        # number of requests article -> article for this day
        timeseries_data.loc[counter:counter + 23, 't_activity_article'] = day_weblog.shape[0]
        # ... of which the diversity classification changed between referrer and requested page
        day_weblog = day_weblog[day_weblog['requested_' + classification_column_diversity] != day_weblog['referrer_' + classification_column_diversity]]
        timeseries_data.loc[counter:counter + 23, 't_activity_article_change_class'] = day_weblog.shape[0]
        counter += 24
        del day_weblog
        yesterday = day
    if verbose == True:
        print("     Temporal analysis on number of article computed in %.1f seconds." % (timelib.time() - start_time_tot))
    return timeseries_data
def plot_temporal(timeseries_data, group_names, micd = False, filename = None, verbose = False):
    """
    Plot temporal analysis with timeseries_data calculated with "temporal_analysis".

    Draws stacked sub-plots (hourly activity, consumed diversity, offered
    diversity and, when micd is True, mean individual consumed diversity) with
    one curve for the total plus one per group.  Days are separated by dotted
    vertical lines and weekend days are shaded green.

    Parameters
    ----------
    timeseries_data: pandas dataframe given by temporal functions
    group_names: list of string
    micd: bool, if Mean Individual Consummed Diversity is wanted
    filename: optional string; when given, the figure is saved to ./<filename>.pdf
    verbose: bool, print timing information

    Returns
    -------
    None
    """
    if verbose== True:
        start_time = timelib.time()
        print("\n * Plotting temporal analysis ...")
    # Moving-average smoothing kernel; with filter_size=1 the convolution is a
    # no-op (placeholder for optional smoothing)
    filter_size=1
    filter_array=(1/filter_size)*np.ones(filter_size)
    first_date=pd.Timestamp(timeseries_data['start_time'].min())
    last_date=pd.Timestamp(timeseries_data['end_time'].max())
    # One "year/month/day" label per day covered, handling a month boundary
    if first_date.day > last_date.day:
        list_date = ['%d/%d/%d'%(first_date.year,first_date.month,n) for n in range(first_date.day,first_date.days_in_month+1)]+\
        ['%d/%d/%d'%(last_date.year,last_date.month,n) for n in range(1,last_date.day+1)]
        number_of_days=len(list_date)
    else :
        list_date = ['%d/%d/%d'%(first_date.year,first_date.month,n) for n in range(first_date.day,last_date.day+1)]
        number_of_days=len(list_date)
    # Day-of-month numbers that fall on a weekend (dayofweek 5 or 6), for shading
    we_days = [pd.Timestamp(n).day for n in timeseries_data['start_time'] if (pd.Timestamp(n).dayofweek == 5 or \
               pd.Timestamp(n).dayofweek == 6)]
    we_days = list(set(we_days))
    # Figure setting: a fourth axis is only created when micd is requested
    if micd: fig,(ax1,ax2,ax3,ax4)=plt.subplots(4,1,figsize=(20,15))
    else: fig,(ax1,ax2,ax3)=plt.subplots(3,1,figsize=(20,15))
    # Activity: total first, then one curve per group (x is the hourly row index)
    ax1.plot(range(len(timeseries_data['t_activity_total'])),np.convolve(timeseries_data['t_activity_total'],filter_array,mode='same'))
    for group_name in group_names:
        ax1.plot(range(len(timeseries_data['t_activity_'+group_name])),np.convolve(timeseries_data['t_activity_'+group_name],filter_array,mode='same'))
    # One tick at noon of each day; labels are hidden on all but the bottom axis
    ax1.set_xticks([12+n*24 for n in range(0,number_of_days)])
    #ax1.set_xticklabels(list_date,fontsize=11)
    # NOTE(review): exactly three empty labels are supplied regardless of the
    # tick count -- newer matplotlib may reject a length mismatch; confirm.
    ax1.set_xticklabels(['','',''],fontsize=11)
    ax1.grid(False)
    ax1.set_yscale('log')
    ax1.set_xlim((0,len(timeseries_data['t_activity_total'])))
    for n in range(0,number_of_days+1):#painting the lines dividing the days
        ax1.axvline(x=n*24,color='k',linestyle=':')
    for we_day in we_days: # painting the weekend days
        ax1.axvspan(24*(we_day-first_date.day), 24*(we_day-first_date.day+1), facecolor='green', edgecolor='none', alpha=.2)
    ax1.set_title('Hourly Activity',fontsize=14)
    ax1.set_ylabel('Requests',fontsize=14)
    ax_names = ['Total'] + group_names
    # Heuristic: shift the legend left as the graph grows wider
    x_legend = 1.09- 0.05*number_of_days
    ax1.legend(ax_names,loc='best', bbox_to_anchor=(x_legend, 0., 0.5, 0.5)) # option to deplace the legend
    # Consumed diversity: curves show 2**entropy
    ax2.plot(range(len(timeseries_data['t_consumed_diversity_total'])),\
             np.convolve(np.power(2,timeseries_data['t_consumed_diversity_total']),filter_array,mode='same'))
    for group_name in group_names:
        ax2.plot(range(len(timeseries_data['t_consumed_diversity_'+group_name])),\
                 np.convolve(np.power(2,timeseries_data['t_consumed_diversity_'+group_name]),filter_array,mode='same'))
    ax2.set_xticks([12+n*24 for n in range(0,number_of_days)])
    #ax2.set_xticklabels(list_date,fontsize=11)
    ax2.set_xticklabels(['','',''],fontsize=11)
    ax2.grid(False)
    ax2.set_xlim((0,len(timeseries_data['t_activity_total'])))
    for n in range(0,number_of_days+1):#painting the lines dividing the days
        ax2.axvline(x=n*24,color='k',linestyle=':')
    for we_day in we_days: # painting the weekend days
        ax2.axvspan(24*(we_day-first_date.day), 24*(we_day-first_date.day+1), facecolor='green', edgecolor='none', alpha=.2)
    ax2.set_ylabel('Cons. IEUC',fontsize=14)
    # Offered diversity: curves show 2**entropy
    ax3.plot(range(len(timeseries_data['t_offered_diversity_total'])),\
             np.convolve(np.power(2,timeseries_data['t_offered_diversity_total']),filter_array,mode='same'))
    for group_name in group_names:
        ax3.plot(range(len(timeseries_data['t_offered_diversity_'+group_name])),\
                 np.convolve(np.power(2,timeseries_data['t_offered_diversity_'+group_name]),filter_array,mode='same'))
    ax3.set_xticks([12+n*24 for n in range(0,number_of_days)])
    ax3.set_xticklabels(['','',''],fontsize=11)
    # ax3 is the bottom axis when micd is off, so it carries the date labels
    if not micd: ax3.set_xticklabels(list_date,fontsize=11)
    ax3.grid(False)
    ax3.set_xlim((0,len(timeseries_data['t_activity_total'])))
    for n in range(0,number_of_days+1):#painting the lines dividing the days
        ax3.axvline(x=n*24,color='k',linestyle=':')
    for we_day in we_days: # painting the weekend days
        ax3.axvspan(24*(we_day-first_date.day), 24*(we_day-first_date.day+1), facecolor='green', edgecolor='none', alpha=.2)
    ax3.set_ylabel('Off. IEUC',fontsize=14)
    # Mean Ind. Cons. Diversity (only when micd): bottom axis, carries the labels
    if micd:
        ax4.plot(range(len(timeseries_data['t_mean_ind_cons_div_total'])),\
                 np.convolve(np.power(2,timeseries_data['t_mean_ind_cons_div_total']),filter_array,mode='same'))
        for group_name in group_names:
            ax4.plot(range(len(timeseries_data['t_mean_ind_cons_div_'+group_name])),\
                     np.convolve(np.power(2,timeseries_data['t_mean_ind_cons_div_'+group_name]),filter_array,mode='same'))
        ax4.set_xticks([12+n*24 for n in range(0,number_of_days)])
        ax4.set_xticklabels(list_date,fontsize=11)
        ax4.grid(False)
        ax4.set_xlim((0,len(timeseries_data['t_activity_total'])))
        for n in range(0,number_of_days+1):#painting the lines dividing the days
            ax4.axvline(x=n*24,color='k',linestyle=':')
        for we_day in we_days: # painting the weekend days
            ax4.axvspan(24*(we_day-first_date.day), 24*(we_day-first_date.day+1), facecolor='green', edgecolor='none', alpha=.2)
        ax4.set_xlabel('Time',fontsize=14)
        ax4.set_ylabel('M.I.C.IEUC',fontsize=14)
    # Saving: figure width scales with the number of weeks shown
    week_graph_length=10
    weeks_in_graph=number_of_days/7
    fig.set_size_inches(week_graph_length*weeks_in_graph, 5)
    if filename is not None:
        plt.savefig('./%s.pdf'%filename, bbox_inches = 'tight') # bbox in order to save the legend
    plt.show()
    if verbose == True:
        print("     Temporal analysis plotted in %.1f seconds."%(timelib.time() - start_time))
    plt.clf()
    plt.close()
    return;
def plot_temporal_article(timeseries_data, filename = None, verbose = False):
    """
    Plot temporal analysis with timeseries_data calculated temporal_analysis_article,
    plot #(requests art-art that have changed class)/#(requests art-art).

    Days are separated by dotted vertical lines and weekend days are shaded
    green; the y-axis is logarithmic.

    Parameters
    ----------
    timeseries_data: pandas dataframe given by temporal functions
    filename: optional string; when given, the figure is saved to ./<filename>.pdf
    verbose: bool, print timing information

    Returns
    -------
    None
    """
    if verbose== True:
        start_time = timelib.time()
        print("\n * Plotting temporal analysis on number of article ...")
    # Moving-average smoothing kernel; with filter_size=1 the convolution is a no-op
    filter_size=1
    filter_array=(1/filter_size)*np.ones(filter_size)
    first_date=pd.Timestamp(timeseries_data['start_time'].min())
    last_date=pd.Timestamp(timeseries_data['end_time'].max())
    # One "year/month/day" label per day covered, handling a month boundary
    if first_date.day > last_date.day:
        list_date = ['%d/%d/%d'%(first_date.year,first_date.month,n) for n in range(first_date.day,first_date.days_in_month+1)]+\
        ['%d/%d/%d'%(last_date.year,last_date.month,n) for n in range(1,last_date.day+1)]
        number_of_days=len(list_date)
    else :
        list_date = ['%d/%d/%d'%(first_date.year,first_date.month,n) for n in range(first_date.day,last_date.day+1)]
        number_of_days=len(list_date)
    # Day-of-month numbers that fall on a weekend (dayofweek 5 or 6), for shading
    we_days = [pd.Timestamp(n).day for n in timeseries_data['start_time'] if (pd.Timestamp(n).dayofweek == 5 or \
               pd.Timestamp(n).dayofweek == 6)]
    we_days = list(set(we_days))
    # Figure setting
    fig,ax1=plt.subplots(1,1)
    # Ratio of change-class requests over all article->article requests
    ax1.plot(range(len(timeseries_data['t_activity_article'])),np.convolve(timeseries_data['t_activity_article_change_class']/timeseries_data['t_activity_article'],filter_array,mode='same'))
    # One tick at noon of each day, labelled with that day's date
    ax1.set_xticks([12+n*24 for n in range(0,number_of_days)])
    ax1.set_xticklabels(list_date,fontsize=11)
    ax1.grid(False)
    ax1.set_yscale('log')
    ax1.set_xlim((0,len(timeseries_data['t_activity_article'])))
    for n in range(0,number_of_days+1):#painting the lines dividing the days
        ax1.axvline(x=n*24,color='k',linestyle=':')
    for we_day in we_days: # painting the weekend days
        ax1.axvspan(24*(we_day-first_date.day), 24*(we_day-first_date.day+1), facecolor='green', edgecolor='none', alpha=.2)
    ax1.set_title('#(requêtes art-art qui changent de class)/#(requêtes art-art)',fontsize=14)
    ax1.set_ylabel('Requests',fontsize=14)
    #ax_names= ['Total','Sessions \nwith more than \n4 requests','Sessions\noriginated\nin search\npages','Sessions\noriginated\nin social\nplatforms']
    # Saving: figure width scales with the number of weeks shown
    week_graph_length=10
    weeks_in_graph=number_of_days/7
    fig.set_size_inches(week_graph_length*weeks_in_graph, 5)
    if filename is not None:
        plt.savefig('./%s.pdf'%filename, bbox_inches = 'tight') # bbox in order to save the legend
    plt.show()
    if verbose == True:
        print("     Temporal analysis plotted in %.1f seconds."%(timelib.time() - start_time))
    plt.clf()
    plt.close()
    return;
| 54.085828
| 220
| 0.678488
| 3,584
| 27,097
| 4.822824
| 0.072266
| 0.061556
| 0.038183
| 0.042175
| 0.913682
| 0.899219
| 0.871276
| 0.856292
| 0.841539
| 0.807579
| 0
| 0.016423
| 0.20899
| 27,097
| 500
| 221
| 54.194
| 0.790007
| 0.12566
| 0
| 0.702941
| 0
| 0
| 0.128799
| 0.033318
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014706
| false
| 0
| 0.020588
| 0
| 0.044118
| 0.029412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6284164f9c865c641ea83ca4a0ed75ac6e015017
| 7,235
|
py
|
Python
|
tests/testproject/tests.py
|
ataylor32/django-friendly-tag-loader
|
00b91090172433b04f90c850bb1edeed1a262abf
|
[
"MIT"
] | 3
|
2019-12-24T06:52:51.000Z
|
2020-09-01T12:00:38.000Z
|
tests/testproject/tests.py
|
ataylor32/django-friendly-tag-loader
|
00b91090172433b04f90c850bb1edeed1a262abf
|
[
"MIT"
] | null | null | null |
tests/testproject/tests.py
|
ataylor32/django-friendly-tag-loader
|
00b91090172433b04f90c850bb1edeed1a262abf
|
[
"MIT"
] | null | null | null |
from django.template import Template, TemplateSyntaxError
from django.template.base import Lexer, Parser
from django.template.context import Context
from django.template.engine import Engine
from django.test import TestCase
# Default template engine, shared by the parsing tests below.
engine = Engine.get_default()


def _render_template(template):
    """Render *template* with an empty context and strip surrounding whitespace."""
    rendered = Template(template).render(Context({}))
    return rendered.strip()
class FriendlyLoadingTest(TestCase):
    """Tests for {% friendly_load %}: loading existing, missing and
    partially-missing tag libraries without raising."""

    def _parse(self, template):
        # Tokenize and parse *template* against the default engine's tag
        # libraries and builtins, returning the Parser so tests can inspect
        # which tags got registered.  (Was copy-pasted in four tests.)
        lexer = Lexer(template)
        parser = Parser(lexer.tokenize(), engine.template_libraries, engine.template_builtins)
        parser.parse()
        return parser

    def test_cannot_load_missing_taglib_using_standard_load(self):
        # The stock {% load %} raises on an unknown library.
        template = '{% load error_tags %}'
        self.assertRaises(TemplateSyntaxError, Template, template)

    def test_can_load_missing_taglib_using_friendly_load(self):
        # {% friendly_load %} tolerates an unknown library.
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load error_tags %}')
        self.assertTrue(
            isinstance(Template(template), Template),
            'Expected template to initialize')

    def test_can_load_taglib_using_friendly_load(self):
        template = '{% load friendly_loader %}{% friendly_load flatpages %}'
        parser = self._parse(template)
        self.assertTrue(
            'get_flatpages' in parser.tags,
            'Expected flatpages taglib to load and provide the get_flatpages tag')

    def test_can_load_missing_and_existing_taglib_using_friendly_load(self):
        # The missing library is skipped; the existing one still loads.
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load error_tags flatpages %}')
        parser = self._parse(template)
        self.assertTrue(
            'get_flatpages' in parser.tags,
            'Expected flatpages taglib to load and provide the get_flatpages tag')

    def test_can_load_from_taglib(self):
        # "from" syntax pulls a single tag out of an existing library.
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load get_flatpages from flatpages %}')
        parser = self._parse(template)
        self.assertTrue(
            'get_flatpages' in parser.tags,
            'Expected flatpages taglib to load and provide the get_flatpages tag')

    def test_can_load_from_missing_taglib(self):
        # Parsing must not raise even though the named library is missing.
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load error from error_tags %}')
        self._parse(template)
        self.assertTrue(
            isinstance(Template(template), Template),
            'Expected template to initialize')
class BaseRenderTest(TestCase):
    """Shared assertion helper for the tag-rendering tests below."""

    def assertSuccess(self, template):
        # Render the template and require the exact output 'SUCCESS'.
        rendered = _render_template(template)
        self.assertEqual('SUCCESS', rendered, 'Expected template to render SUCCESS')
class HasTagTest(BaseRenderTest):
    """Tests for the {% if_has_tag %} ... {% endif_has_tag %} block tag."""

    def test_must_have_arguments(self):
        # if_has_tag with no tag names is a template syntax error
        template = (
            '{% load friendly_loader %}'
            '{% if_has_tag %}FAIL{% endif_has_tag %}')
        self.assertRaises(
            TemplateSyntaxError, _render_template, template)

    def test_can_test_builtins(self):
        # Built-in tags (e.g. "now") count as available
        template = (
            '{% load friendly_loader %}'
            '{% if_has_tag now %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_missing_tags(self):
        # The body (which uses the missing tag) is skipped; else-branch renders
        template = (
            '{% load friendly_loader %}'
            '{% if_has_tag fail %}{% fail %}'
            '{% else %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_loaded_tags(self):
        # Tags loaded via the regular {% load %} are seen
        template = (
            '{% load friendly_loader flatpages %}'
            '{% if_has_tag get_flatpages %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_friendly_loaded_tags(self):
        # Tags loaded via {% friendly_load %} are seen too
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% if_has_tag get_flatpages %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_multiple_existing_tags(self):
        # Multiple arguments: all listed tags must exist
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% if_has_tag now get_flatpages %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_both_existing_and_missing_tags(self):
        # One missing tag makes the whole condition false
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% if_has_tag get_flatpages fail %}FAIL'
            '{% else %}SUCCESS{% endif_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_missing_tags_without_else(self):
        # Without an else branch a false condition renders nothing
        template = (
            '{% load friendly_loader %}'
            '{% if_has_tag fail %}FAIL{% endif_has_tag %}')
        self.assertEqual(
            '', _render_template(template),
            'Expected template to render nothing')
class NotHasTagTest(BaseRenderTest):
    """Tests for {% ifnot_has_tag %} ... {% endifnot_has_tag %} -- the
    negated counterpart of if_has_tag."""

    def test_must_have_arguments(self):
        # ifnot_has_tag with no tag names is a template syntax error
        template = (
            '{% load friendly_loader %}'
            '{% ifnot_has_tag %}FAIL{% endifnot_has_tag %}')
        self.assertRaises(TemplateSyntaxError, _render_template, template)

    def test_can_test_builtins(self):
        # Built-in tags exist, so the negated condition is false -> else renders
        template = (
            '{% load friendly_loader %}'
            '{% ifnot_has_tag now %}FAIL'
            '{% else %}SUCCESS{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_missing_tags(self):
        # Missing tag -> negated condition is true -> body renders
        template = (
            '{% load friendly_loader %}'
            '{% ifnot_has_tag fail %}SUCCESS'
            '{% else %}FAIL{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_loaded_tags(self):
        # Tags loaded via the regular {% load %} count as present
        template = (
            '{% load friendly_loader flatpages %}'
            '{% ifnot_has_tag get_flatpages %}FAIL'
            '{% else %}SUCCESS{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_friendly_loaded_tags(self):
        # Tags loaded via {% friendly_load %} count as present too
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% ifnot_has_tag get_flatpages %}FAIL'
            '{% else %}SUCCESS{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_multiple_existing_tags(self):
        # All listed tags exist -> negated condition false -> else renders
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% ifnot_has_tag now get_flatpages %}FAIL'
            '{% else %}SUCCESS{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_both_existing_and_missing_tags(self):
        # One missing tag is enough to make the negated condition true
        template = (
            '{% load friendly_loader %}'
            '{% friendly_load flatpages %}'
            '{% ifnot_has_tag get_flatpages fail %}SUCCESS'
            '{% else %}FAIL{% endifnot_has_tag %}')
        self.assertSuccess(template)

    def test_can_test_tags_without_else(self):
        # Without an else branch a false negated condition renders nothing
        template = (
            '{% load friendly_loader %}'
            '{% ifnot_has_tag now %}FAIL{% endifnot_has_tag %}')
        self.assertEqual(
            '', _render_template(template),
            'Expected template to render nothing')
| 36.913265
| 94
| 0.618245
| 741
| 7,235
| 5.703104
| 0.095816
| 0.045433
| 0.083294
| 0.119262
| 0.858258
| 0.840274
| 0.840274
| 0.829389
| 0.797444
| 0.788452
| 0
| 0
| 0.274223
| 7,235
| 195
| 95
| 37.102564
| 0.804799
| 0
| 0
| 0.723926
| 0
| 0
| 0.324257
| 0
| 0
| 0
| 0
| 0
| 0.147239
| 1
| 0.147239
| false
| 0
| 0.030675
| 0.006135
| 0.208589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
655b2e11a10dd45a37985b924ad412f1335c05b0
| 6,923
|
py
|
Python
|
test_parser.py
|
L3viathan/sql-mojo-parser
|
fc460c42f3fbcc21c6fc08c0aede8e2a5db637f3
|
[
"MIT"
] | null | null | null |
test_parser.py
|
L3viathan/sql-mojo-parser
|
fc460c42f3fbcc21c6fc08c0aede8e2a5db637f3
|
[
"MIT"
] | null | null | null |
test_parser.py
|
L3viathan/sql-mojo-parser
|
fc460c42f3fbcc21c6fc08c0aede8e2a5db637f3
|
[
"MIT"
] | null | null | null |
import pytest
from sql_mojo_parser import yacc
# Each case pairs a SQL string with the exact AST dict the parser must emit.
@pytest.mark.parametrize(
    "string,result",
    [
        # Single named column.
        (
            "select foo from bar",
            {
                "type": "select",
                "columns": [
                    {"type": "name", "value": "foo"},
                ],
                "table": {"type": "name", "value": "bar"},
            }
        ),
        # Star projection.
        (
            "select * from bar",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
            }
        ),
        # Multiple columns.
        (
            "select foo, bar from bar",
            {
                "type": "select",
                "columns": [
                    {"type": "name", "value": "foo"},
                    {"type": "name", "value": "bar"},
                ],
                "table": {"type": "name", "value": "bar"},
            }
        ),
        # LIMIT clause is parsed to an int.
        (
            "select * from bar limit 10",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "limit": 10,
            }
        ),
        # Simple WHERE comparison.
        (
            "select * from bar where a=3",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "condition": {
                    "op": "=",
                    "args": [
                        {"type": "name", "value": "a"},
                        {"type": "literal", "value": 3},
                    ],
                }
            }
        ),
        # Unary NOT wraps its operand in a one-element args list.
        (
            "select * from bar where not a=3",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "condition": {
                    "op": "not",
                    "args": [
                        {
                            "op": "=",
                            "args": [
                                {"type": "name", "value": "a"},
                                {"type": "literal", "value": 3},
                            ],
                        },
                    ]
                }
            }
        ),
        # AND of two comparisons.
        (
            "select * from bar where a=3 and b=2",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "condition": {
                    "op": "and",
                    "args": [
                        {
                            "op": "=",
                            "args": [
                                {"type": "name", "value": "a"},
                                {"type": "literal", "value": 3},
                            ],
                        },
                        {
                            "op": "=",
                            "args": [
                                {"type": "name", "value": "b"},
                                {"type": "literal", "value": 2},
                            ],
                        },
                    ]
                }
            }
        ),
        # Parentheses force the OR to nest under the AND.
        (
            "select * from bar where a=3 and (b=2 or c=1)",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "condition": {
                    "op": "and",
                    "args": [
                        {
                            "op": "=",
                            "args": [
                                {"type": "name", "value": "a"},
                                {"type": "literal", "value": 3},
                            ],
                        },
                        {
                            "op": "or",
                            "args": [
                                {
                                    "op": "=",
                                    "args": [
                                        {"type": "name", "value": "b"},
                                        {"type": "literal", "value": 2},
                                    ],
                                },
                                {
                                    "op": "=",
                                    "args": [
                                        {"type": "name", "value": "c"},
                                        {"type": "literal", "value": 1},
                                    ],
                                },
                            ],
                        },
                    ],
                },
            },
        ),
        # Without parentheses AND binds tighter than OR.
        (
            "select * from bar where a=3 and b=2 or c=1",
            {
                "type": "select",
                "columns": [
                    {"type": "star"},
                ],
                "table": {"type": "name", "value": "bar"},
                "condition": {
                    "op": "or",
                    "args": [
                        {
                            "op": "and",
                            "args": [
                                {
                                    "op": "=",
                                    "args": [
                                        {"type": "name", "value": "a"},
                                        {"type": "literal", "value": 3},
                                    ],
                                },
                                {
                                    "op": "=",
                                    "args": [
                                        {"type": "name", "value": "b"},
                                        {"type": "literal", "value": 2},
                                    ],
                                },
                            ],
                        },
                        {
                            "op": "=",
                            "args": [
                                {"type": "name", "value": "c"},
                                {"type": "literal", "value": 1},
                            ],
                        },
                    ],
                },
            },
        ),
    ]
)
def test_parse_success(string, result):
    """Each SQL string must parse to exactly the expected AST dictionary."""
    assert yacc.parse(string) == result
@pytest.mark.parametrize(
    "string",
    [
        # Plain strings, not 1-tuples: with a single argname, pytest passes
        # each list element through unmodified, so a value like
        # ("select from foo",) would hand yacc.parse the tuple itself rather
        # than the SQL text under test.
        "some random string",
        "select from foo",
    ]
)
def test_parse_fail(string):
    """Invalid SQL input must make the parser raise ValueError."""
    with pytest.raises(ValueError):
        yacc.parse(string)
| 32.050926
| 72
| 0.197747
| 332
| 6,923
| 4.105422
| 0.141566
| 0.129127
| 0.209831
| 0.117388
| 0.774028
| 0.774028
| 0.753485
| 0.753485
| 0.703595
| 0.639765
| 0
| 0.010008
| 0.653618
| 6,923
| 215
| 73
| 32.2
| 0.558382
| 0
| 0
| 0.597156
| 0
| 0
| 0.178391
| 0
| 0
| 0
| 0
| 0
| 0.004739
| 1
| 0.009479
| false
| 0
| 0.009479
| 0
| 0.018957
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6570cacc644b09160c9cccb448928435559d6ccf
| 10,208
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowInterfacesDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowInterfacesDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowInterfacesDetail/cli/equal/golden_output1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden parsed output for an IOSXR "show interfaces detail" fixture: one
# entry per interface name; sub-interfaces (.10/.20) carry a reduced counter
# set, physical ports carry the full counter/flow-control/duplex schema.
expected_output = {
    "GigabitEthernet0/0/0/0": {
        "auto_negotiate": False,
        "bandwidth": 768,
        "carrier_delay": "10",
        "counters": {
            "carrier_transitions": 0,
            "in_abort": 0,
            "in_broadcast_pkts": 0,
            "in_crc_errors": 0,
            "in_discards": 0,
            "in_frame": 0,
            "in_frame_errors": 0,
            "in_giants": 0,
            "in_ignored": 0,
            "in_multicast_pkts": 0,
            "in_octets": 0,
            "in_overrun": 0,
            "in_parity": 0,
            "in_pkts": 0,
            "in_runts": 0,
            "in_throttles": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_applique": 0,
            "out_broadcast_pkts": 0,
            "out_buffer_failures": 0,
            "out_buffer_swapped_out": 0,
            "out_discards": 0,
            "out_errors": 0,
            "out_multicast_pkts": 0,
            "out_octets": 0,
            "out_pkts": 0,
            "out_resets": 0,
            "out_underruns": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 30,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "description": "desc",
        "duplex_mode": "full",
        "enabled": False,
        "encapsulations": {"encapsulation": "arpa"},
        "flow_control": {"flow_control_receive": False, "flow_control_send": False},
        "interface_state": 0,
        "ipv4": {"10.1.1.1/24": {"ip": "10.1.1.1", "prefix_length": "24"}},
        "last_input": "never",
        "last_output": "never",
        "line_protocol": "administratively down",
        "oper_status": "down",
        "location": "unknown",
        "mac_address": "aaaa.bbff.8888",
        "mtu": 1600,
        "phys_address": "5254.00ff.0c7e",
        "port_speed": "1000Mb/s",
        "reliability": "255/255",
        "rxload": "0/255",
        "txload": "0/255",
        "types": "gigabitethernet",
    },
    "GigabitEthernet0/0/0/0.10": {
        "bandwidth": 768,
        "counters": {
            "in_broadcast_pkts": 0,
            "in_discards": 0,
            "in_multicast_pkts": 0,
            "in_octets": 0,
            "in_pkts": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_broadcast_pkts": 0,
            "out_discards": 0,
            "out_multicast_pkts": 0,
            "out_octets": 0,
            "out_pkts": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 300,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "enabled": False,
        "encapsulations": {
            # Adjacent literals concatenate to "802.1q virtual lan".
            "encapsulation": "802.1q " "virtual " "lan",
            "first_dot1q": "10",
            "second_dot1q": "10",
        },
        "interface_state": 0,
        "last_input": "never",
        "last_output": "never",
        "line_protocol": "administratively down",
        "oper_status": "down",
        "mac_address": "aaaa.bbff.8888",
        "mtu": 1608,
        "reliability": "255/255",
        "rxload": "0/255",
        "txload": "0/255",
        "types": "vlan sub-(s)",
    },
    "GigabitEthernet0/0/0/0.20": {
        "bandwidth": 768,
        "counters": {
            "in_broadcast_pkts": 0,
            "in_discards": 0,
            "in_multicast_pkts": 0,
            "in_octets": 0,
            "in_pkts": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_broadcast_pkts": 0,
            "out_discards": 0,
            "out_multicast_pkts": 0,
            "out_octets": 0,
            "out_pkts": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 300,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "enabled": False,
        "encapsulations": {
            # Adjacent literals concatenate to "802.1q virtual lan".
            "encapsulation": "802.1q " "virtual " "lan",
            "first_dot1q": "20",
        },
        "interface_state": 0,
        "last_input": "never",
        "last_output": "never",
        "line_protocol": "administratively down",
        "oper_status": "down",
        "mac_address": "aaaa.bbff.8888",
        "mtu": 1604,
        "reliability": "255/255",
        "rxload": "0/255",
        "txload": "0/255",
        "types": "vlan sub-(s)",
    },
    "GigabitEthernet0/0/0/1": {
        "arp_timeout": "04:00:00",
        "arp_type": "arpa",
        "auto_negotiate": False,
        "bandwidth": 1000000,
        "carrier_delay": "10",
        "counters": {
            "carrier_transitions": 1,
            "in_abort": 0,
            "in_broadcast_pkts": 0,
            "in_crc_errors": 0,
            "in_discards": 0,
            "in_frame": 0,
            "in_frame_errors": 0,
            "in_giants": 0,
            "in_ignored": 0,
            "in_multicast_pkts": 29056,
            "in_octets": 18221418,
            "in_overrun": 0,
            "in_parity": 0,
            "in_pkts": 146164,
            "in_runts": 0,
            "in_throttles": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_applique": 0,
            "out_broadcast_pkts": 2,
            "out_buffer_failures": 0,
            "out_buffer_swapped_out": 0,
            "out_discards": 0,
            "out_errors": 0,
            "out_multicast_pkts": 6246,
            "out_octets": 10777610,
            "out_pkts": 123696,
            "out_resets": 0,
            "out_underruns": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 300,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "duplex_mode": "full",
        "enabled": True,
        "encapsulations": {"encapsulation": "arpa"},
        "flow_control": {"flow_control_receive": False, "flow_control_send": False},
        "interface_state": 1,
        "ipv4": {"10.1.5.1/24": {"ip": "10.1.5.1", "prefix_length": "24"}},
        "last_input": "00:01:09",
        "last_link_flapped": "1w5d",
        "last_output": "00:01:09",
        "line_protocol": "up",
        "oper_status": "up",
        "location": "unknown",
        "mac_address": "5254.00ff.6459",
        "mtu": 1514,
        "phys_address": "5254.00ff.6459",
        "port_speed": "1000Mb/s",
        "reliability": "255/255",
        "rxload": "0/255",
        "txload": "0/255",
        "types": "gigabitethernet",
    },
    "MgmtEth0/0/CPU0/0": {
        "auto_negotiate": True,
        "bandwidth": 0,
        "carrier_delay": "10",
        "counters": {
            "carrier_transitions": 0,
            "in_abort": 0,
            "in_broadcast_pkts": 0,
            "in_crc_errors": 0,
            "in_discards": 0,
            "in_frame": 0,
            "in_frame_errors": 0,
            "in_giants": 0,
            "in_ignored": 0,
            "in_multicast_pkts": 0,
            "in_octets": 0,
            "in_overrun": 0,
            "in_parity": 0,
            "in_pkts": 0,
            "in_runts": 0,
            "in_throttles": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_applique": 0,
            "out_broadcast_pkts": 0,
            "out_buffer_failures": 0,
            "out_buffer_swapped_out": 0,
            "out_discards": 0,
            "out_errors": 0,
            "out_multicast_pkts": 0,
            "out_octets": 0,
            "out_pkts": 0,
            "out_resets": 0,
            "out_underruns": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 300,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "duplex_mode": "duplex unknown",
        "enabled": False,
        "encapsulations": {"encapsulation": "arpa"},
        "flow_control": {"flow_control_receive": False, "flow_control_send": False},
        "interface_state": 0,
        "last_input": "never",
        "last_output": "never",
        "line_protocol": "administratively down",
        "oper_status": "down",
        "location": "unknown",
        "mac_address": "5254.00ff.3007",
        "mtu": 1514,
        "phys_address": "5254.00ff.3007",
        "port_speed": "0",
        "reliability": "255/255",
        "rxload": "unknown",
        "txload": "unknown",
        "types": "management ethernet",
    },
    "Loopback0": {
        "bandwidth": 0,
        "description": "loopback0 BGP test",
        "enabled": True,
        "encapsulations": {"encapsulation": "loopback"},
        "interface_state": 1,
        "ipv4": {"10.255.20.81/32": {"ip": "10.255.20.81", "prefix_length": "32"}},
        "last_input": "Unknown",
        "last_link_flapped": "13:45:11",
        "last_output": "Unknown",
        "line_protocol": "up",
        "mtu": 1500,
        "oper_status": "up",
        "reliability": "Unknown",
        "rxload": "unknown",
        "txload": "unknown",
    },
    "Null0": {
        "bandwidth": 0,
        "counters": {
            "in_broadcast_pkts": 0,
            "in_discards": 0,
            "in_multicast_pkts": 0,
            "in_octets": 0,
            "in_pkts": 0,
            "in_unknown_protos": 0,
            "last_clear": "never",
            "out_broadcast_pkts": 0,
            "out_discards": 0,
            "out_multicast_pkts": 0,
            "out_octets": 0,
            "out_pkts": 0,
            "rate": {
                "in_rate": 0,
                "in_rate_pkts": 0,
                "load_interval": 300,
                "out_rate": 0,
                "out_rate_pkts": 0,
            },
        },
        "enabled": True,
        "encapsulations": {"encapsulation": "null"},
        "last_input": "never",
        "last_output": "never",
        "line_protocol": "up",
        "oper_status": "up",
        "mtu": 1500,
        "reliability": "255/255",
        "rxload": "unknown",
        "txload": "unknown",
        "types": "null",
    },
}
| 31.409231
| 84
| 0.44808
| 996
| 10,208
| 4.299197
| 0.149598
| 0.045539
| 0.026156
| 0.022419
| 0.818076
| 0.8078
| 0.754787
| 0.750117
| 0.712284
| 0.712284
| 0
| 0.081711
| 0.390968
| 10,208
| 324
| 85
| 31.506173
| 0.607045
| 0
| 0
| 0.777778
| 0
| 0
| 0.425255
| 0.015674
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65bc2062cb79ee81e9d1a1d15d25a90e64138525
| 16,673
|
py
|
Python
|
operations/assets/migrations/0001_initial.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/assets/migrations/0001_initial.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/assets/migrations/0001_initial.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-09-18 07:07
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import lib.fields
class Migration(migrations.Migration):
    """Auto-generated initial migration for the assets app.

    Creates Asset, AssetDetail and AssetPurchaseDetail plus their
    django-simple-history shadow tables (Historical*), then wires the
    Asset foreign keys. Generated code — do not edit by hand; create a
    follow-up migration instead.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('operations', '0023_auto_20180828_0917'),
    ]

    operations = [
        migrations.CreateModel(
            name='Asset',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('changed_at', models.DateTimeField(auto_now=True, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('category_one', models.CharField(blank=True, choices=[('appliances', 'Appliances'), ('electronic_equipment', 'Electronic Equipment'), ('furniture', 'Furniture'), ('it_equipment', 'IT Equipment'), ('stationery', 'Stationery')], max_length=50, null=True, verbose_name='Category 1')),
                ('category_two', models.CharField(blank=True, max_length=50, null=True, verbose_name='Category 2')),
                ('category_three', models.CharField(blank=True, max_length=50, null=True, verbose_name='Category 3')),
                ('asset_description', models.CharField(blank=True, max_length=255, null=True, verbose_name='Asset Description')),
                ('make', models.CharField(blank=True, max_length=255, null=True)),
                ('model', models.CharField(blank=True, max_length=255, null=True)),
                ('serial_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Serial Number')),
                ('colour', models.CharField(blank=True, max_length=255, null=True)),
                ('reason', models.CharField(blank=True, max_length=255, null=True)),
                ('condition', models.CharField(blank=True, choices=[('good', 'Good'), ('new', 'New'), ('poor', 'Poor')], max_length=50, null=True)),
                ('status', models.CharField(blank=True, choices=[('in_use', 'In Use'), ('in_storage', 'In Storage')], max_length=50, null=True)),
                ('supplier_name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Supplier Name')),
                ('warranty_expiry', models.DateTimeField(blank=True, null=True, verbose_name='Warranty Expiry')),
                ('address', lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='asset_address', to='operations.Address')),
            ],
            options={
                'default_permissions': [],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AssetDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('changed_at', models.DateTimeField(auto_now=True, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('quantity', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)])),
                ('user', models.CharField(blank=True, max_length=255, null=True)),
                ('department', models.CharField(blank=True, choices=[('administration', 'Administration'), ('client_services', 'Client Services'), ('finance', 'Finance'), ('human_resources', 'Human Resources'), ('it', 'IT'), ('marketing', 'Marketing'), ('operations', 'Operations'), ('sales', 'Sales')], max_length=50, null=True)),
                ('created_by', lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_asset_detail', to=settings.AUTH_USER_MODEL)),
                ('district', lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='district_asset_detail', to='operations.Branch')),
                ('modified_by', lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_asset_detail', to=settings.AUTH_USER_MODEL)),
                ('region', lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='region_asset_detail', to='operations.Region')),
            ],
            options={
                'default_permissions': [],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AssetPurchaseDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('changed_at', models.DateTimeField(auto_now=True, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('invoice_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Invoice Number')),
                ('purchase_date', models.DateTimeField(blank=True, null=True, verbose_name='Purchase Date')),
                ('asset_purchase_price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Purchase Price')),
                ('vat', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='VAT')),
                ('total_price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Total Price')),
                ('created_by', lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_purchase_detail_asset', to=settings.AUTH_USER_MODEL)),
                ('modified_by', lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_purchase_detail_asset', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'default_permissions': [],
                'abstract': False,
            },
        ),
        # History tables: db_constraint=False FKs and related_name='+' are
        # the django-simple-history convention for shadow rows.
        migrations.CreateModel(
            name='HistoricalAsset',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('changed_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('category_one', models.CharField(blank=True, choices=[('appliances', 'Appliances'), ('electronic_equipment', 'Electronic Equipment'), ('furniture', 'Furniture'), ('it_equipment', 'IT Equipment'), ('stationery', 'Stationery')], max_length=50, null=True, verbose_name='Category 1')),
                ('category_two', models.CharField(blank=True, max_length=50, null=True, verbose_name='Category 2')),
                ('category_three', models.CharField(blank=True, max_length=50, null=True, verbose_name='Category 3')),
                ('asset_description', models.CharField(blank=True, max_length=255, null=True, verbose_name='Asset Description')),
                ('make', models.CharField(blank=True, max_length=255, null=True)),
                ('model', models.CharField(blank=True, max_length=255, null=True)),
                ('serial_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Serial Number')),
                ('colour', models.CharField(blank=True, max_length=255, null=True)),
                ('reason', models.CharField(blank=True, max_length=255, null=True)),
                ('condition', models.CharField(blank=True, choices=[('good', 'Good'), ('new', 'New'), ('poor', 'Poor')], max_length=50, null=True)),
                ('status', models.CharField(blank=True, choices=[('in_use', 'In Use'), ('in_storage', 'In Storage')], max_length=50, null=True)),
                ('supplier_name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Supplier Name')),
                ('warranty_expiry', models.DateTimeField(blank=True, null=True, verbose_name='Warranty Expiry')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('address', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Address')),
                ('asset_detail', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='assets.AssetDetail')),
                ('asset_purchase_detail', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='assets.AssetPurchaseDetail')),
                ('contact_person', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Contact')),
                ('created_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('modified_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical asset',
            },
        ),
        migrations.CreateModel(
            name='HistoricalAssetDetail',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('changed_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('quantity', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)])),
                ('user', models.CharField(blank=True, max_length=255, null=True)),
                ('department', models.CharField(blank=True, choices=[('administration', 'Administration'), ('client_services', 'Client Services'), ('finance', 'Finance'), ('human_resources', 'Human Resources'), ('it', 'IT'), ('marketing', 'Marketing'), ('operations', 'Operations'), ('sales', 'Sales')], max_length=50, null=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('created_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('district', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Branch')),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('modified_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('region', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='operations.Region')),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical asset detail',
            },
        ),
        migrations.CreateModel(
            name='HistoricalAssetPurchaseDetail',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('created_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('changed_at', models.DateTimeField(blank=True, editable=False, null=True)),
                ('deleted', models.BooleanField(db_index=True, default=False)),
                ('invoice_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Invoice Number')),
                ('purchase_date', models.DateTimeField(blank=True, null=True, verbose_name='Purchase Date')),
                ('asset_purchase_price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Purchase Price')),
                ('vat', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='VAT')),
                ('total_price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Total Price')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('created_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('modified_by', lib.fields.ProtectedForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical asset purchase detail',
            },
        ),
        # Asset FKs added after all models exist to avoid ordering issues.
        migrations.AddField(
            model_name='asset',
            name='asset_detail',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='asset_detail', to='assets.AssetDetail'),
        ),
        migrations.AddField(
            model_name='asset',
            name='asset_purchase_detail',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='asset_purchase_detail', to='assets.AssetPurchaseDetail'),
        ),
        migrations.AddField(
            model_name='asset',
            name='contact_person',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='asset_contact', to='operations.Contact'),
        ),
        migrations.AddField(
            model_name='asset',
            name='created_by',
            field=lib.fields.ProtectedForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='created_asset', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='asset',
            name='modified_by',
            field=lib.fields.ProtectedForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modified_asset', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 81.331707
| 331
| 0.644395
| 1,828
| 16,673
| 5.685449
| 0.088074
| 0.057731
| 0.057731
| 0.069277
| 0.917156
| 0.91629
| 0.900125
| 0.891081
| 0.867026
| 0.867026
| 0
| 0.011624
| 0.200264
| 16,673
| 204
| 332
| 81.730392
| 0.767812
| 0.004078
| 0
| 0.734694
| 1
| 0
| 0.185701
| 0.021202
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030612
| 0
| 0.05102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65d2f235f7f13bafe49da415e9482735d9269ee7
| 496
|
py
|
Python
|
diagnostic_classifiers/dataset_readers/__init__.py
|
maayanorner/lexcomp
|
4bcd033c7483b1aa09ee569238180502858caa21
|
[
"Apache-2.0"
] | 28
|
2019-02-28T10:00:28.000Z
|
2021-06-28T16:41:27.000Z
|
diagnostic_classifiers/dataset_readers/__init__.py
|
maayanorner/lexcomp
|
4bcd033c7483b1aa09ee569238180502858caa21
|
[
"Apache-2.0"
] | null | null | null |
diagnostic_classifiers/dataset_readers/__init__.py
|
maayanorner/lexcomp
|
4bcd033c7483b1aa09ee569238180502858caa21
|
[
"Apache-2.0"
] | 6
|
2019-02-28T11:58:28.000Z
|
2021-10-17T19:58:58.000Z
|
from diagnostic_classifiers.dataset_readers.sequence_labeling_dataset_reader import SeqLabelReader
from diagnostic_classifiers.dataset_readers.sentence_word_dataset_reader import SentenceWordDatasetReader
from diagnostic_classifiers.dataset_readers.sentence_and_span_classification_dataset_reader import SentenceSpanClassificationDatasetReader
from diagnostic_classifiers.dataset_readers.sentence_span_sentence_classification_dataset_reader import SentenceSpanSentenceClassificationDatasetReader
| 99.2
| 151
| 0.951613
| 48
| 496
| 9.333333
| 0.375
| 0.125
| 0.223214
| 0.285714
| 0.401786
| 0.314732
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 496
| 4
| 152
| 124
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02e9ff772736c6ce673ea4fe35d68f3a20a82550
| 8
|
py
|
Python
|
tests/res/dicts/get/20/q/40.py
|
ssato/python-anyconfig
|
09af1950f3226759932f5168d52f5e06ab88815c
|
[
"MIT"
] | 213
|
2015-01-14T22:09:20.000Z
|
2022-02-02T17:23:41.000Z
|
tests/res/dicts/get/20/q/40.py
|
ssato/python-anyconfig
|
09af1950f3226759932f5168d52f5e06ab88815c
|
[
"MIT"
] | 120
|
2015-03-13T15:47:43.000Z
|
2022-03-31T01:55:34.000Z
|
tests/res/dicts/get/20/q/40.py
|
ssato/python-anyconfig
|
09af1950f3226759932f5168d52f5e06ab88815c
|
[
"MIT"
] | 34
|
2015-01-12T05:03:30.000Z
|
2021-09-09T14:40:56.000Z
|
"/a~1b"
| 4
| 7
| 0.375
| 2
| 8
| 1.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.125
| 8
| 1
| 8
| 8
| 0.285714
| 0.625
| 0
| 0
| 0
| 0
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f303773be53181a7cddb99c264bbc0c1cbc78125
| 38
|
py
|
Python
|
encryption/__init__.py
|
trishantpahwa/PasswordProtectPDF
|
a2f167a7e93f7470bb336bf2773f2f3d1c241fbd
|
[
"BSD-2-Clause"
] | null | null | null |
encryption/__init__.py
|
trishantpahwa/PasswordProtectPDF
|
a2f167a7e93f7470bb336bf2773f2f3d1c241fbd
|
[
"BSD-2-Clause"
] | null | null | null |
encryption/__init__.py
|
trishantpahwa/PasswordProtectPDF
|
a2f167a7e93f7470bb336bf2773f2f3d1c241fbd
|
[
"BSD-2-Clause"
] | null | null | null |
from .encrypt_file import add_password
| 38
| 38
| 0.894737
| 6
| 38
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b82a41e59a18d8a6811dbe9f25583aacd35a67b2
| 15,540
|
py
|
Python
|
tests/adspygoogle/dfp/forecast_service_unittest.py
|
krux/adspygoogle
|
6505a71122f45fe3e675f27f2c29f67a1768069b
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/adspygoogle/dfp/forecast_service_unittest.py
|
krux/adspygoogle
|
6505a71122f45fe3e675f27f2c29f67a1768069b
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/adspygoogle/dfp/forecast_service_unittest.py
|
krux/adspygoogle
|
6505a71122f45fe3e675f27f2c29f67a1768069b
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover ForecastService."""
__author__ = 'api.sgrinberg@gmail.com (Stan Grinberg)'
from datetime import date
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..'))
import unittest
from adspygoogle.common import Utils
from tests.adspygoogle.dfp import client
from tests.adspygoogle.dfp import HTTP_PROXY
from tests.adspygoogle.dfp import SERVER_V201108
from tests.adspygoogle.dfp import SERVER_V201111
from tests.adspygoogle.dfp import TEST_VERSION_V201108
from tests.adspygoogle.dfp import TEST_VERSION_V201111
from tests.adspygoogle.dfp import VERSION_V201108
from tests.adspygoogle.dfp import VERSION_V201111
class ForecastServiceTestV201108(unittest.TestCase):
  """Unittest suite for ForecastService using v201108."""

  # Shared, lazily-created fixtures. setUp() populates the sentinel values
  # ('0' / None) on first use so that every test in this class reuses a
  # single service stub, order, ad unit and line item rather than creating
  # new ones per test method.
  SERVER = SERVER_V201108
  VERSION = VERSION_V201108
  client.debug = False
  service = None
  order_id = '0'
  ad_unit_id = '0'
  line_item_id = '0'

  def setUp(self):
    """Prepare unittest.

    Lazily creates, once per class: the ForecastService stub, an order
    (which requires an advertiser company and a trafficker user), an ad
    unit under the network's root ad unit, and a STANDARD line item.
    """
    print self.id()
    if not self.__class__.service:
      self.__class__.service = client.GetForecastService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)

    if self.__class__.order_id == '0':
      # An order needs an advertiser company id and a trafficker user id.
      company = {
          'name': 'Company #%s' % Utils.GetUniqueName(),
          'type': 'ADVERTISER'
      }
      advertiser_id = client.GetCompanyService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCompany(company)[0]['id']
      filter_statement = {'query': 'ORDER BY name LIMIT 500'}
      users = client.GetUserService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).GetUsersByStatement(filter_statement)
      # Use the first user found whose role is 'Trafficker'.
      trafficker_id = '0'
      for user in users[0]['results']:
        if user['roleName'] in ('Trafficker',):
          trafficker_id = user['id']
          break
      order = {
          'advertiserId': advertiser_id,
          'currencyCode': 'USD',
          'name': 'Order #%s' % Utils.GetUniqueName(),
          'traffickerId': trafficker_id
      }
      self.__class__.order_id = client.GetOrderService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateOrder(order)[0]['id']

    if self.__class__.ad_unit_id == '0':
      inventory_service = client.GetInventoryService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      # New ad units must be parented under the network's root ad unit
      # (the one with no parent).
      filter_statement = {'query': 'WHERE parentId IS NULL LIMIT 500'}
      root_ad_unit_id = inventory_service.GetAdUnitsByStatement(
          filter_statement)[0]['results'][0]['id']
      ad_unit = {
          'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
          'parentId': root_ad_unit_id,
          'adUnitSizes': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              }
          ],
          'description': 'Ad unit description.',
          'targetWindow': 'BLANK'
      }
      self.__class__.ad_unit_id = inventory_service.CreateAdUnit(
          ad_unit)[0]['id']

    if self.__class__.line_item_id == '0':
      line_item_service = client.GetLineItemService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      # A one-month STANDARD CPM line item starting Sept 1 of next year,
      # targeting only the ad unit created above.
      line_item = {
          'name': 'Line item #%s' % Utils.GetUniqueName(),
          'orderId': self.__class__.order_id,
          'targeting': {
              'inventoryTargeting': {
                  'targetedAdUnitIds': [self.__class__.ad_unit_id]
              }
          },
          'creativePlaceholders': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              },
              {
                  'size': {
                      'width': '120',
                      'height': '600'
                  }
              }
          ],
          'lineItemType': 'STANDARD',
          'startDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '1'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'endDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '30'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'costType': 'CPM',
          'costPerUnit': {
              'currencyCode': 'USD',
              'microAmount': '2000000'
          },
          'creativeRotationType': 'EVEN',
          'discountType': 'PERCENTAGE',
          'unitsBought': '500000',
          'unitType': 'IMPRESSIONS'
      }
      self.__class__.line_item_id = line_item_service.CreateLineItem(
          line_item)[0]['id']

  def testGetForecast(self):
    """Test whether we can get a forecast for given line item."""
    # A prospective (not yet created) line item: same shape as the one in
    # setUp() but with extra day-part and user-domain targeting.
    line_item = {
        'name': 'Line item #%s' % Utils.GetUniqueName(),
        'orderId': self.__class__.order_id,
        'targeting': {
            'inventoryTargeting': {
                'targetedAdUnitIds': [self.__class__.ad_unit_id]
            },
            'dayPartTargeting': {
                'dayParts': [
                    {
                        'dayOfWeek': 'TUESDAY',
                        'startTime': {
                            'hour': '10',
                            'minute': 'ZERO'
                        },
                        'endTime': {
                            'hour': '18',
                            'minute': 'THIRTY'
                        }
                    }
                ],
                'timeZone': 'PUBLISHER'
            },
            'userDomainTargeting': {
                'domains': ['google.com'],
                'targeted': 'false'
            }
        },
        'creativePlaceholders': [
            {
                'size': {
                    'width': '300',
                    'height': '250'
                }
            },
            {
                'size': {
                    'width': '120',
                    'height': '600'
                }
            }
        ],
        'lineItemType': 'STANDARD',
        'startDateTime': {
            'date': {
                'year': str(date.today().year + 1),
                'month': '9',
                'day': '1'
            },
            'hour': '0',
            'minute': '0',
            'second': '0'
        },
        'endDateTime': {
            'date': {
                'year': str(date.today().year + 1),
                'month': '9',
                'day': '30'
            },
            'hour': '0',
            'minute': '0',
            'second': '0'
        },
        'costType': 'CPM',
        'costPerUnit': {
            'currencyCode': 'USD',
            'microAmount': '2000000'
        },
        'creativeRotationType': 'EVEN',
        'discountType': 'PERCENTAGE',
        'unitsBought': '500000',
        'unitType': 'IMPRESSIONS'
    }
    # Service wrappers return tuples; only the call shape is asserted here.
    self.assert_(isinstance(self.__class__.service.GetForecast(
        line_item), tuple))

  def testGetForecastById(self):
    """Test whether we can get a forecast for existing line item."""
    self.assert_(isinstance(self.__class__.service.GetForecastById(
        self.__class__.line_item_id), tuple))
class ForecastServiceTestV201111(unittest.TestCase):
  """Unittest suite for ForecastService using v201111."""

  # Shared, lazily-created fixtures. setUp() populates the sentinel values
  # ('0' / None) on first use so that every test in this class reuses a
  # single service stub, order, ad unit and line item rather than creating
  # new ones per test method.
  SERVER = SERVER_V201111
  VERSION = VERSION_V201111
  client.debug = False
  service = None
  order_id = '0'
  ad_unit_id = '0'
  line_item_id = '0'

  def setUp(self):
    """Prepare unittest.

    Lazily creates, once per class: the ForecastService stub, an order
    (which requires an advertiser company and a trafficker user), an ad
    unit under the network's root ad unit, and a STANDARD line item.
    """
    print self.id()
    if not self.__class__.service:
      self.__class__.service = client.GetForecastService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)

    if self.__class__.order_id == '0':
      # An order needs an advertiser company id and a trafficker user id.
      company = {
          'name': 'Company #%s' % Utils.GetUniqueName(),
          'type': 'ADVERTISER'
      }
      advertiser_id = client.GetCompanyService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCompany(company)[0]['id']
      filter_statement = {'query': 'ORDER BY name LIMIT 500'}
      users = client.GetUserService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).GetUsersByStatement(filter_statement)
      # Use the first user found whose role is 'Trafficker'.
      trafficker_id = '0'
      for user in users[0]['results']:
        if user['roleName'] in ('Trafficker',):
          trafficker_id = user['id']
          break
      order = {
          'advertiserId': advertiser_id,
          'currencyCode': 'USD',
          'name': 'Order #%s' % Utils.GetUniqueName(),
          'traffickerId': trafficker_id
      }
      self.__class__.order_id = client.GetOrderService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateOrder(order)[0]['id']

    if self.__class__.ad_unit_id == '0':
      inventory_service = client.GetInventoryService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      # New ad units must be parented under the network's root ad unit
      # (the one with no parent).
      filter_statement = {'query': 'WHERE parentId IS NULL LIMIT 500'}
      root_ad_unit_id = inventory_service.GetAdUnitsByStatement(
          filter_statement)[0]['results'][0]['id']
      ad_unit = {
          'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
          'parentId': root_ad_unit_id,
          'adUnitSizes': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              }
          ],
          'description': 'Ad unit description.',
          'targetWindow': 'BLANK'
      }
      self.__class__.ad_unit_id = inventory_service.CreateAdUnit(
          ad_unit)[0]['id']

    if self.__class__.line_item_id == '0':
      line_item_service = client.GetLineItemService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      # A one-month STANDARD CPM line item starting Sept 1 of next year,
      # targeting only the ad unit created above.
      line_item = {
          'name': 'Line item #%s' % Utils.GetUniqueName(),
          'orderId': self.__class__.order_id,
          'targeting': {
              'inventoryTargeting': {
                  'targetedAdUnitIds': [self.__class__.ad_unit_id]
              }
          },
          'creativePlaceholders': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              },
              {
                  'size': {
                      'width': '120',
                      'height': '600'
                  }
              }
          ],
          'lineItemType': 'STANDARD',
          'startDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '1'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'endDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '30'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'costType': 'CPM',
          'costPerUnit': {
              'currencyCode': 'USD',
              'microAmount': '2000000'
          },
          'creativeRotationType': 'EVEN',
          'discountType': 'PERCENTAGE',
          'unitsBought': '500000',
          'unitType': 'IMPRESSIONS'
      }
      self.__class__.line_item_id = line_item_service.CreateLineItem(
          line_item)[0]['id']

  def testGetForecast(self):
    """Test whether we can get a forecast for given line item."""
    # A prospective (not yet created) line item: same shape as the one in
    # setUp() but with extra day-part and user-domain targeting.
    line_item = {
        'name': 'Line item #%s' % Utils.GetUniqueName(),
        'orderId': self.__class__.order_id,
        'targeting': {
            'inventoryTargeting': {
                'targetedAdUnitIds': [self.__class__.ad_unit_id]
            },
            'dayPartTargeting': {
                'dayParts': [
                    {
                        'dayOfWeek': 'TUESDAY',
                        'startTime': {
                            'hour': '10',
                            'minute': 'ZERO'
                        },
                        'endTime': {
                            'hour': '18',
                            'minute': 'THIRTY'
                        }
                    }
                ],
                'timeZone': 'PUBLISHER'
            },
            'userDomainTargeting': {
                'domains': ['google.com'],
                'targeted': 'false'
            }
        },
        'creativePlaceholders': [
            {
                'size': {
                    'width': '300',
                    'height': '250'
                }
            },
            {
                'size': {
                    'width': '120',
                    'height': '600'
                }
            }
        ],
        'lineItemType': 'STANDARD',
        'startDateTime': {
            'date': {
                'year': str(date.today().year + 1),
                'month': '9',
                'day': '1'
            },
            'hour': '0',
            'minute': '0',
            'second': '0'
        },
        'endDateTime': {
            'date': {
                'year': str(date.today().year + 1),
                'month': '9',
                'day': '30'
            },
            'hour': '0',
            'minute': '0',
            'second': '0'
        },
        'costType': 'CPM',
        'costPerUnit': {
            'currencyCode': 'USD',
            'microAmount': '2000000'
        },
        'creativeRotationType': 'EVEN',
        'discountType': 'PERCENTAGE',
        'unitsBought': '500000',
        'unitType': 'IMPRESSIONS'
    }
    # Service wrappers return tuples; only the call shape is asserted here.
    self.assert_(isinstance(self.__class__.service.GetForecast(
        line_item), tuple))

  def testGetForecastById(self):
    """Test whether we can get a forecast for existing line item."""
    self.assert_(isinstance(self.__class__.service.GetForecastById(
        self.__class__.line_item_id), tuple))
def makeTestSuiteV201108():
  """Build the test suite for API version v201108.

  Returns:
    TestSuite holding every ForecastService v201108 test case.
  """
  return unittest.TestSuite(unittest.makeSuite(ForecastServiceTestV201108))
def makeTestSuiteV201111():
  """Build the test suite for API version v201111.

  Returns:
    TestSuite holding every ForecastService v201111 test case.
  """
  return unittest.TestSuite(unittest.makeSuite(ForecastServiceTestV201111))
if __name__ == '__main__':
  # Collect one suite per API version that is enabled for testing.
  suites = []
  for enabled, make_suite in ((TEST_VERSION_V201108, makeTestSuiteV201108),
                              (TEST_VERSION_V201111, makeTestSuiteV201111)):
    if enabled:
      suites.append(make_suite())
  if suites:
    # 'alltests' must stay a module-level name: unittest.main looks it up
    # by the string given to defaultTest.
    alltests = unittest.TestSuite(suites)
    unittest.main(defaultTest='alltests')
| 31.204819
| 74
| 0.489511
| 1,296
| 15,540
| 5.58642
| 0.19213
| 0.067127
| 0.01547
| 0.031492
| 0.83826
| 0.816713
| 0.799171
| 0.752486
| 0.752486
| 0.752486
| 0
| 0.039216
| 0.37973
| 15,540
| 497
| 75
| 31.267606
| 0.7119
| 0.039189
| 0
| 0.736111
| 0
| 0
| 0.176108
| 0.001611
| 0
| 0
| 0
| 0
| 0.009259
| 0
| null | null | 0
| 0.030093
| null | null | 0.00463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8327c09f7ab967ffe40fd75a1b79952360c3e09
| 3,906
|
py
|
Python
|
tests/tools/assigner/actions/fixtures.py
|
akashvacher/kafka-tools
|
0d98bbefc1105851b7b7203de4f6c68d9c097730
|
[
"Apache-2.0"
] | 578
|
2016-05-05T05:18:15.000Z
|
2022-03-23T07:18:07.000Z
|
tests/tools/assigner/actions/fixtures.py
|
akashvacher/kafka-tools
|
0d98bbefc1105851b7b7203de4f6c68d9c097730
|
[
"Apache-2.0"
] | 94
|
2016-04-29T23:25:38.000Z
|
2022-02-07T17:16:16.000Z
|
tests/tools/assigner/actions/fixtures.py
|
akashvacher/kafka-tools
|
0d98bbefc1105851b7b7203de4f6c68d9c097730
|
[
"Apache-2.0"
] | 150
|
2016-04-29T16:33:20.000Z
|
2022-03-14T10:05:48.000Z
|
import argparse
from kafka.tools.models.broker import Broker
from kafka.tools.models.cluster import Cluster
from kafka.tools.models.topic import Topic
def set_up_cluster():
    """Build a two-broker, two-topic test cluster.

    Brokers 1 and 2 sit on racks "a" and "b"; each topic has two
    partitions with leadership mirrored between the brokers.
    """
    cluster = Cluster()
    cluster.retention = 100000
    cluster.add_broker(Broker("brokerhost1.example.com", id=1))
    cluster.add_broker(Broker("brokerhost2.example.com", id=2))
    cluster.brokers[1].rack = "a"
    cluster.brokers[2].rack = "b"
    for topic_name in ("testTopic1", "testTopic2"):
        cluster.add_topic(Topic(topic_name, 2))
    # (topic, partition) -> replica broker ids, leader first.
    replica_layout = (
        ("testTopic1", 0, (1, 2)),
        ("testTopic1", 1, (2, 1)),
        ("testTopic2", 0, (2, 1)),
        ("testTopic2", 1, (1, 2)),
    )
    for topic_name, partition_num, broker_ids in replica_layout:
        partition = cluster.topics[topic_name].partitions[partition_num]
        for position, broker_id in enumerate(broker_ids):
            partition.add_replica(cluster.brokers[broker_id], position)
    return cluster
def set_up_cluster_4broker():
    """Build a four-broker, three-topic test cluster.

    Brokers 1-2 are on rack "a", brokers 3-4 on rack "b"; each topic has
    four partitions with two replicas apiece.
    """
    cluster = Cluster()
    for broker_id, rack in ((1, "a"), (2, "a"), (3, "b"), (4, "b")):
        cluster.add_broker(
            Broker("brokerhost%d.example.com" % broker_id, id=broker_id))
        cluster.brokers[broker_id].rack = rack
    for topic_name in ("testTopic1", "testTopic2", "testTopic3"):
        cluster.add_topic(Topic(topic_name, 4))
    # (topic, partition) -> replica broker ids, leader first.
    replica_layout = (
        ("testTopic1", 0, (1, 2)),
        ("testTopic1", 1, (2, 3)),
        ("testTopic1", 2, (2, 3)),
        ("testTopic1", 3, (4, 1)),
        ("testTopic2", 0, (4, 3)),
        ("testTopic2", 1, (2, 4)),
        ("testTopic2", 2, (2, 1)),
        ("testTopic2", 3, (3, 1)),
        ("testTopic3", 0, (3, 2)),
        ("testTopic3", 1, (4, 2)),
        ("testTopic3", 2, (1, 2)),
        ("testTopic3", 3, (3, 4)),
    )
    for topic_name, partition_num, broker_ids in replica_layout:
        partition = cluster.topics[topic_name].partitions[partition_num]
        for position, broker_id in enumerate(broker_ids):
            partition.add_replica(cluster.brokers[broker_id], position)
    return cluster
def set_up_subparser():
    """Create the kafka-assigner argument parser and its subparser group.

    Returns a (parser, subparsers) tuple so callers can register
    per-module subcommands on the subparser group.
    """
    parser = argparse.ArgumentParser(
        prog='kafka-assigner',
        description='Rejigger Kafka cluster partitions')
    subparser_group = parser.add_subparsers(
        help='Select manipulation module to use')
    return (parser, subparser_group)
| 44.386364
| 109
| 0.727343
| 505
| 3,906
| 5.524752
| 0.106931
| 0.190681
| 0.217921
| 0.298208
| 0.812545
| 0.783154
| 0.753047
| 0.724373
| 0.722939
| 0.697491
| 0
| 0.038473
| 0.128264
| 3,906
| 87
| 110
| 44.896552
| 0.78091
| 0
| 0
| 0.625
| 0
| 0
| 0.111111
| 0.03533
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0375
| false
| 0
| 0.05
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b833bc5aeda68b987fbe29bb93a64775d2442494
| 886,096
|
py
|
Python
|
output-python3/sdpParser.py
|
cameronelliott/sdp-antlr-abnf
|
58240fc806906daefc1844d33f088d007f0119a7
|
[
"MIT"
] | 4
|
2020-08-08T19:04:11.000Z
|
2020-10-15T17:41:57.000Z
|
output-python3/sdpParser.py
|
cameron-elliott/sdp-antlr-abnf
|
58240fc806906daefc1844d33f088d007f0119a7
|
[
"MIT"
] | null | null | null |
output-python3/sdpParser.py
|
cameron-elliott/sdp-antlr-abnf
|
58240fc806906daefc1844d33f088d007f0119a7
|
[
"MIT"
] | null | null | null |
# Generated from sdp.g4 by ANTLR 4.8
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u0102")
buf.write("\u081f\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\t")
buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t")
buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4")
buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4")
buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4")
buf.write("q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4y\ty\4")
buf.write("z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080")
buf.write("\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084")
buf.write("\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087")
buf.write("\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a\4\u008b")
buf.write("\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e\t\u008e")
buf.write("\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091\4\u0092")
buf.write("\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095\t\u0095")
buf.write("\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098\4\u0099")
buf.write("\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c\t\u009c")
buf.write("\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f\4\u00a0")
buf.write("\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3\t\u00a3")
buf.write("\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6\4\u00a7")
buf.write("\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\4\u00aa\t\u00aa")
buf.write("\4\u00ab\t\u00ab\4\u00ac\t\u00ac\4\u00ad\t\u00ad\3\2\3")
buf.write("\2\3\2\3\2\3\2\3\2\3\2\3\2\5\2\u0163\n\2\3\2\3\2\3\2\3")
buf.write("\2\3\2\3\2\3\3\3\3\3\3\6\3\u016e\n\3\r\3\16\3\u016f\3")
buf.write("\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4")
buf.write("\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\5")
buf.write("\6\u018d\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u0194\n\7\3\b\3\b")
buf.write("\3\b\3\b\3\b\7\b\u019b\n\b\f\b\16\b\u019e\13\b\3\t\3\t")
buf.write("\3\t\3\t\3\t\7\t\u01a5\n\t\f\t\16\t\u01a8\13\t\3\n\3\n")
buf.write("\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13")
buf.write("\3\13\3\13\7\13\u01ba\n\13\f\13\16\13\u01bd\13\13\3\f")
buf.write("\3\f\3\f\3\f\3\f\3\f\3\f\3\f\7\f\u01c7\n\f\f\f\16\f\u01ca")
buf.write("\13\f\3\f\3\f\6\f\u01ce\n\f\r\f\16\f\u01cf\3\f\3\f\3\f")
buf.write("\5\f\u01d5\n\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\6\r\u01df")
buf.write("\n\r\r\r\16\r\u01e0\3\16\3\16\3\16\3\16\3\16\5\16\u01e8")
buf.write("\n\16\3\16\3\16\3\16\3\16\3\16\5\16\u01ef\n\16\3\16\3")
buf.write("\16\7\16\u01f3\n\16\f\16\16\16\u01f6\13\16\3\17\3\17\3")
buf.write("\17\3\17\3\17\5\17\u01fd\n\17\3\20\3\20\3\20\3\20\3\20")
buf.write("\7\20\u0204\n\20\f\20\16\20\u0207\13\20\3\21\3\21\3\21")
buf.write("\7\21\u020c\n\21\f\21\16\21\u020f\13\21\3\21\3\21\3\21")
buf.write("\3\21\7\21\u0215\n\21\f\21\16\21\u0218\13\21\3\22\3\22")
buf.write("\3\22\3\22\3\22\3\22\3\22\5\22\u0221\n\22\3\22\3\22\3")
buf.write("\22\3\22\3\22\6\22\u0228\n\22\r\22\16\22\u0229\3\22\3")
buf.write("\22\3\23\3\23\3\24\6\24\u0231\n\24\r\24\16\24\u0232\3")
buf.write("\25\6\25\u0236\n\25\r\25\16\25\u0237\3\26\3\26\3\27\3")
buf.write("\27\3\30\3\30\3\31\3\31\3\31\5\31\u0243\n\31\3\32\3\32")
buf.write("\6\32\u0247\n\32\r\32\16\32\u0248\3\32\3\32\6\32\u024d")
buf.write("\n\32\r\32\16\32\u024e\3\32\3\32\3\33\6\33\u0254\n\33")
buf.write("\r\33\16\33\u0255\3\33\6\33\u0259\n\33\r\33\16\33\u025a")
buf.write("\3\33\3\33\3\33\3\33\3\34\3\34\7\34\u0263\n\34\f\34\16")
buf.write("\34\u0266\13\34\3\34\3\34\6\34\u026a\n\34\r\34\16\34\u026b")
buf.write("\3\34\3\34\3\34\6\34\u0271\n\34\r\34\16\34\u0272\3\34")
buf.write("\3\34\3\34\3\34\3\34\5\34\u027a\n\34\3\35\5\35\u027d\n")
buf.write("\35\3\35\3\35\3\35\3\35\6\35\u0283\n\35\r\35\16\35\u0284")
buf.write("\3\36\3\36\5\36\u0289\n\36\3\37\3\37\3 \6 \u028e\n \r")
buf.write(" \16 \u028f\3!\3!\5!\u0294\n!\3\"\3\"\5\"\u0298\n\"\3")
buf.write("#\3#\3#\3#\3#\3#\3#\3#\3#\3#\6#\u02a4\n#\r#\16#\u02a5")
buf.write("\3$\3$\7$\u02aa\n$\f$\16$\u02ad\13$\3$\5$\u02b0\n$\3%")
buf.write("\6%\u02b3\n%\r%\16%\u02b4\3%\5%\u02b8\n%\3&\3&\3\'\3\'")
buf.write("\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3")
buf.write("\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\5\'\u02d7")
buf.write("\n\'\3(\7(\u02da\n(\f(\16(\u02dd\13(\3(\5(\u02e0\n(\3")
buf.write(")\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\5*\u02f3")
buf.write("\n*\3+\3+\3+\3+\5+\u02f9\n+\3,\3,\3,\3,\3,\5,\u0300\n")
buf.write(",\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61\7\61\u030d")
buf.write("\n\61\f\61\16\61\u0310\13\61\3\62\6\62\u0313\n\62\r\62")
buf.write("\16\62\u0314\3\63\3\63\3\63\3\63\5\63\u031b\n\63\3\64")
buf.write("\3\64\3\64\3\64\5\64\u0321\n\64\3\65\3\65\3\65\3\65\3")
buf.write("\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\5\65")
buf.write("\u0331\n\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\5")
buf.write("\66\u033b\n\66\3\67\3\67\3\67\5\67\u0340\n\67\38\38\3")
buf.write("8\38\38\58\u0347\n8\58\u0349\n8\38\58\u034c\n8\39\39\3")
buf.write("9\59\u0351\n9\39\39\39\59\u0356\n9\39\39\39\59\u035b\n")
buf.write("9\39\39\39\69\u0360\n9\r9\169\u0361\3:\3:\3:\3:\3:\3:")
buf.write("\3:\3:\3:\3:\3;\3;\3<\3<\3<\5<\u0373\n<\3=\3=\3=\3=\3")
buf.write("=\3=\5=\u037b\n=\3=\3=\3=\3=\5=\u0381\n=\5=\u0383\n=\3")
buf.write(">\3>\3>\7>\u0388\n>\f>\16>\u038b\13>\3?\3?\3?\3?\3?\3")
buf.write("?\3?\3?\3?\5?\u0396\n?\5?\u0398\n?\3@\3@\3A\3A\3B\3B\3")
buf.write("B\6B\u03a1\nB\rB\16B\u03a2\3C\3C\6C\u03a7\nC\rC\16C\u03a8")
buf.write("\3D\3D\3D\3D\3D\3D\3D\5D\u03b2\nD\3E\6E\u03b5\nE\rE\16")
buf.write("E\u03b6\3F\3F\3F\3F\3F\3F\5F\u03bf\nF\3G\3G\7G\u03c3\n")
buf.write("G\fG\16G\u03c6\13G\3H\3H\5H\u03ca\nH\3I\3I\3J\3J\3J\3")
buf.write("J\3J\3J\3J\6J\u03d5\nJ\rJ\16J\u03d6\3J\3J\3J\3J\3J\3J")
buf.write("\5J\u03df\nJ\3K\3K\3K\3K\3L\3L\3L\5L\u03e8\nL\3M\3M\3")
buf.write("M\5M\u03ed\nM\3N\5N\u03f0\nN\3N\3N\5N\u03f4\nN\3N\7N\u03f7")
buf.write("\nN\fN\16N\u03fa\13N\3N\5N\u03fd\nN\3N\3N\5N\u0401\nN")
buf.write("\3O\3O\3O\5O\u0406\nO\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3")
buf.write("P\3P\3P\3P\3P\3P\3P\3P\3P\3P\3P\5P\u041d\nP\3Q\5Q\u0420")
buf.write("\nQ\3Q\6Q\u0423\nQ\rQ\16Q\u0424\3Q\5Q\u0428\nQ\3R\6R\u042b")
buf.write("\nR\rR\16R\u042c\3R\3R\6R\u0431\nR\rR\16R\u0432\7R\u0435")
buf.write("\nR\fR\16R\u0438\13R\3S\5S\u043b\nS\3S\3S\5S\u043f\nS")
buf.write("\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\5T\u044e\nT\3")
buf.write("U\3U\3U\3U\5U\u0454\nU\3V\3V\5V\u0458\nV\3W\5W\u045b\n")
buf.write("W\3W\3W\5W\u045f\nW\3W\7W\u0462\nW\fW\16W\u0465\13W\3")
buf.write("W\5W\u0468\nW\3W\3W\5W\u046c\nW\3X\3X\5X\u0470\nX\3Y\6")
buf.write("Y\u0473\nY\rY\16Y\u0474\3Y\5Y\u0478\nY\3Z\3Z\3Z\5Z\u047d")
buf.write("\nZ\3Z\5Z\u0480\nZ\3[\7[\u0483\n[\f[\16[\u0486\13[\3[")
buf.write("\5[\u0489\n[\3[\6[\u048c\n[\r[\16[\u048d\3[\5[\u0491\n")
buf.write("[\3\\\3\\\3\\\3\\\5\\\u0497\n\\\3]\3]\3]\5]\u049c\n]\3")
buf.write("^\3^\5^\u04a0\n^\3^\7^\u04a3\n^\f^\16^\u04a6\13^\3^\5")
buf.write("^\u04a9\n^\3^\3^\3_\5_\u04ae\n_\3_\6_\u04b1\n_\r_\16_")
buf.write("\u04b2\3_\5_\u04b6\n_\3_\5_\u04b9\n_\3`\3`\3a\3a\3b\3")
buf.write("b\3b\5b\u04c2\nb\3c\3c\3c\3c\3c\5c\u04c9\nc\3d\3d\3d\3")
buf.write("d\7d\u04cf\nd\fd\16d\u04d2\13d\3e\3e\5e\u04d6\ne\3e\3")
buf.write("e\3e\5e\u04db\ne\7e\u04dd\ne\fe\16e\u04e0\13e\3f\5f\u04e3")
buf.write("\nf\3f\3f\3f\3f\3f\5f\u04ea\nf\3g\3g\3g\3h\3h\7h\u04f1")
buf.write("\nh\fh\16h\u04f4\13h\3h\3h\3h\3h\5h\u04fa\nh\3h\3h\5h")
buf.write("\u04fe\nh\7h\u0500\nh\fh\16h\u0503\13h\3i\5i\u0506\ni")
buf.write("\3i\7i\u0509\ni\fi\16i\u050c\13i\3i\3i\3i\3i\5i\u0512")
buf.write("\ni\7i\u0514\ni\fi\16i\u0517\13i\3j\5j\u051a\nj\3j\7j")
buf.write("\u051d\nj\fj\16j\u0520\13j\3j\3j\3j\3j\5j\u0526\nj\7j")
buf.write("\u0528\nj\fj\16j\u052b\13j\3k\5k\u052e\nk\3k\6k\u0531")
buf.write("\nk\rk\16k\u0532\3k\5k\u0536\nk\3l\3l\3l\7l\u053b\nl\f")
buf.write("l\16l\u053e\13l\3m\3m\3m\7m\u0543\nm\fm\16m\u0546\13m")
buf.write("\3n\3n\5n\u054a\nn\3o\6o\u054d\no\ro\16o\u054e\3o\3o\6")
buf.write("o\u0553\no\ro\16o\u0554\7o\u0557\no\fo\16o\u055a\13o\3")
buf.write("p\3p\3p\3p\3p\5p\u0561\np\3q\3q\5q\u0565\nq\3r\3r\5r\u0569")
buf.write("\nr\3s\5s\u056c\ns\3s\3s\3t\5t\u0571\nt\3t\3t\3t\3t\5")
buf.write("t\u0577\nt\3t\5t\u057a\nt\3u\3u\3u\5u\u057f\nu\3u\3u\5")
buf.write("u\u0583\nu\3v\3v\3w\3w\3w\7w\u058a\nw\fw\16w\u058d\13")
buf.write("w\3w\5w\u0590\nw\3x\3x\3x\7x\u0595\nx\fx\16x\u0598\13")
buf.write("x\3x\5x\u059b\nx\3y\3y\3y\5y\u05a0\ny\3z\3z\5z\u05a4\n")
buf.write("z\3{\3{\3|\3|\3}\3}\3~\3~\3~\3\177\3\177\5\177\u05b1\n")
buf.write("\177\3\u0080\3\u0080\3\u0081\3\u0081\3\u0082\3\u0082\3")
buf.write("\u0082\3\u0082\3\u0082\3\u0082\3\u0082\5\u0082\u05be\n")
buf.write("\u0082\3\u0083\3\u0083\3\u0084\3\u0084\3\u0085\3\u0085")
buf.write("\3\u0085\3\u0085\7\u0085\u05c8\n\u0085\f\u0085\16\u0085")
buf.write("\u05cb\13\u0085\3\u0086\3\u0086\3\u0087\3\u0087\3\u0088")
buf.write("\3\u0088\3\u0089\3\u0089\5\u0089\u05d5\n\u0089\3\u008a")
buf.write("\3\u008a\3\u008a\3\u008a\3\u008a\5\u008a\u05dc\n\u008a")
buf.write("\3\u008a\3\u008a\5\u008a\u05e0\n\u008a\3\u008b\3\u008b")
buf.write("\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b")
buf.write("\5\u008b\u05eb\n\u008b\3\u008c\3\u008c\5\u008c\u05ef\n")
buf.write("\u008c\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\5\u008d")
buf.write("\u05f6\n\u008d\3\u008e\3\u008e\3\u008e\5\u008e\u05fb\n")
buf.write("\u008e\3\u008e\3\u008e\5\u008e\u05ff\n\u008e\3\u008f\3")
buf.write("\u008f\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f")
buf.write("\3\u008f\5\u008f\u060a\n\u008f\3\u0090\3\u0090\3\u0090")
buf.write("\3\u0090\3\u0090\3\u0090\7\u0090\u0612\n\u0090\f\u0090")
buf.write("\16\u0090\u0615\13\u0090\3\u0091\3\u0091\3\u0091\5\u0091")
buf.write("\u061a\n\u0091\3\u0091\3\u0091\3\u0091\5\u0091\u061f\n")
buf.write("\u0091\3\u0092\3\u0092\3\u0092\3\u0092\7\u0092\u0625\n")
buf.write("\u0092\f\u0092\16\u0092\u0628\13\u0092\3\u0093\3\u0093")
buf.write("\3\u0093\5\u0093\u062d\n\u0093\3\u0094\7\u0094\u0630\n")
buf.write("\u0094\f\u0094\16\u0094\u0633\13\u0094\3\u0095\3\u0095")
buf.write("\3\u0095\5\u0095\u0638\n\u0095\3\u0095\3\u0095\3\u0096")
buf.write("\3\u0096\6\u0096\u063e\n\u0096\r\u0096\16\u0096\u063f")
buf.write("\3\u0096\3\u0096\3\u0096\3\u0096\6\u0096\u0646\n\u0096")
buf.write("\r\u0096\16\u0096\u0647\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\5\u0097\u0673\n\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\5\u0097\u0689\n\u0097")
buf.write("\3\u0097\5\u0097\u068c\n\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\5\u0097")
buf.write("\u06a5\n\u0097\5\u0097\u06a7\n\u0097\3\u0097\5\u0097\u06aa")
buf.write("\n\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\5\u0097\u06c9\n\u0097\5\u0097\u06cb\n")
buf.write("\u0097\3\u0097\5\u0097\u06ce\n\u0097\3\u0097\3\u0097\3")
buf.write("\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\5\u0097\u06f5\n\u0097\5\u0097\u06f7\n\u0097\3\u0097\5")
buf.write("\u0097\u06fa\n\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3")
buf.write("\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\5\u0097\u072d\n\u0097\5\u0097")
buf.write("\u072f\n\u0097\3\u0097\5\u0097\u0732\n\u0097\3\u0097\3")
buf.write("\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0097\3\u0097\5\u0097\u0777\n\u0097\5\u0097")
buf.write("\u0779\n\u0097\3\u0097\5\u0097\u077c\n\u0097\3\u0097\3")
buf.write("\u0097\5\u0097\u0780\n\u0097\3\u0098\3\u0098\3\u0098\3")
buf.write("\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\5\u0098")
buf.write("\u078b\n\u0098\5\u0098\u078d\n\u0098\3\u0099\3\u0099\3")
buf.write("\u0099\3\u0099\3\u0099\5\u0099\u0794\n\u0099\3\u009a\3")
buf.write("\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a")
buf.write("\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b")
buf.write("\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b")
buf.write("\5\u009b\u07ac\n\u009b\3\u009c\3\u009c\3\u009c\7\u009c")
buf.write("\u07b1\n\u009c\f\u009c\16\u009c\u07b4\13\u009c\3\u009d")
buf.write("\3\u009d\3\u009d\3\u009d\3\u009d\5\u009d\u07bb\n\u009d")
buf.write("\3\u009e\3\u009e\7\u009e\u07bf\n\u009e\f\u009e\16\u009e")
buf.write("\u07c2\13\u009e\3\u009f\3\u009f\3\u009f\3\u009f\7\u009f")
buf.write("\u07c8\n\u009f\f\u009f\16\u009f\u07cb\13\u009f\5\u009f")
buf.write("\u07cd\n\u009f\3\u00a0\3\u00a0\3\u00a0\7\u00a0\u07d2\n")
buf.write("\u00a0\f\u00a0\16\u00a0\u07d5\13\u00a0\3\u00a1\3\u00a1")
buf.write("\3\u00a1\7\u00a1\u07da\n\u00a1\f\u00a1\16\u00a1\u07dd")
buf.write("\13\u00a1\3\u00a2\3\u00a2\3\u00a3\7\u00a3\u07e2\n\u00a3")
buf.write("\f\u00a3\16\u00a3\u07e5\13\u00a3\3\u00a4\6\u00a4\u07e8")
buf.write("\n\u00a4\r\u00a4\16\u00a4\u07e9\3\u00a5\3\u00a5\3\u00a5")
buf.write("\3\u00a5\6\u00a5\u07f0\n\u00a5\r\u00a5\16\u00a5\u07f1")
buf.write("\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6\5\u00a6\u07f9")
buf.write("\n\u00a6\3\u00a7\3\u00a7\3\u00a7\7\u00a7\u07fe\n\u00a7")
buf.write("\f\u00a7\16\u00a7\u0801\13\u00a7\3\u00a8\3\u00a8\3\u00a8")
buf.write("\7\u00a8\u0806\n\u00a8\f\u00a8\16\u00a8\u0809\13\u00a8")
buf.write("\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00aa\3\u00aa\3\u00aa")
buf.write("\3\u00aa\3\u00aa\3\u00aa\5\u00aa\u0815\n\u00aa\3\u00ab")
buf.write("\3\u00ab\5\u00ab\u0819\n\u00ab\3\u00ac\3\u00ac\3\u00ad")
buf.write("\3\u00ad\3\u00ad\2\2\u00ae\2\4\6\b\n\f\16\20\22\24\26")
buf.write("\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\")
buf.write("^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a")
buf.write("\u008c\u008e\u0090\u0092\u0094\u0096\u0098\u009a\u009c")
buf.write("\u009e\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa\u00ac\u00ae")
buf.write("\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc\u00be\u00c0")
buf.write("\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce\u00d0\u00d2")
buf.write("\u00d4\u00d6\u00d8\u00da\u00dc\u00de\u00e0\u00e2\u00e4")
buf.write("\u00e6\u00e8\u00ea\u00ec\u00ee\u00f0\u00f2\u00f4\u00f6")
buf.write("\u00f8\u00fa\u00fc\u00fe\u0100\u0102\u0104\u0106\u0108")
buf.write("\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a")
buf.write("\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c")
buf.write("\u012e\u0130\u0132\u0134\u0136\u0138\u013a\u013c\u013e")
buf.write("\u0140\u0142\u0144\u0146\u0148\u014a\u014c\u014e\u0150")
buf.write("\u0152\u0154\u0156\u0158\2*\6\2JJNNSSYY\3\2\32\37\4\2")
buf.write("\3\3fm\3\2no\4\2\6dp\u0102\3\2\u0083\u0102\3\2\t\r\3\2")
buf.write("\20\21\3\2\23\24\3\2\26\37\3\2\'@\3\2Dd\4\2\6\rp\u0081")
buf.write("\3\2\20!\4\2%d\u0082\u0102\3\2\27\37\3\2\26\32\3\2\26")
buf.write("\33\3\2\7@\3\2\tA\3\2Cd\3\2\7\r\3\2\20A\3\2fm\3\2p\u0081")
buf.write("\3\2G`\3\2\26\27\4\2\3df\u0082\4\2\3\5e\u0081\4\2\'\'")
buf.write("GG\4\2((HH\4\2))II\4\2**JJ\4\2++KK\4\2,,LL\3\2\3\u0102")
buf.write("\3\2\7d\4\2<<\\\\\b\2\t\t\25\25 %&AACC\7\2\7\7\n\n\f")
buf.write("\22!!##\2\u08e4\2\u015a\3\2\2\2\4\u016a\3\2\2\2\6\u0173")
buf.write("\3\2\2\2\b\u0182\3\2\2\2\n\u018c\3\2\2\2\f\u0193\3\2\2")
buf.write("\2\16\u019c\3\2\2\2\20\u01a6\3\2\2\2\22\u01a9\3\2\2\2")
buf.write("\24\u01bb\3\2\2\2\26\u01cd\3\2\2\2\30\u01d6\3\2\2\2\32")
buf.write("\u01e2\3\2\2\2\34\u01fc\3\2\2\2\36\u0205\3\2\2\2 \u0216")
buf.write("\3\2\2\2\"\u0219\3\2\2\2$\u022d\3\2\2\2&\u0230\3\2\2\2")
buf.write("(\u0235\3\2\2\2*\u0239\3\2\2\2,\u023b\3\2\2\2.\u023d\3")
buf.write("\2\2\2\60\u0242\3\2\2\2\62\u0244\3\2\2\2\64\u0253\3\2")
buf.write("\2\2\66\u0279\3\2\2\28\u027c\3\2\2\2:\u0288\3\2\2\2<\u028a")
buf.write("\3\2\2\2>\u028d\3\2\2\2@\u0293\3\2\2\2B\u0297\3\2\2\2")
buf.write("D\u0299\3\2\2\2F\u02a7\3\2\2\2H\u02b2\3\2\2\2J\u02b9\3")
buf.write("\2\2\2L\u02d6\3\2\2\2N\u02db\3\2\2\2P\u02e1\3\2\2\2R\u02f2")
buf.write("\3\2\2\2T\u02f8\3\2\2\2V\u02ff\3\2\2\2X\u0301\3\2\2\2")
buf.write("Z\u0303\3\2\2\2\\\u0305\3\2\2\2^\u0307\3\2\2\2`\u0309")
buf.write("\3\2\2\2b\u0312\3\2\2\2d\u031a\3\2\2\2f\u0320\3\2\2\2")
buf.write("h\u0322\3\2\2\2j\u033a\3\2\2\2l\u033c\3\2\2\2n\u034b\3")
buf.write("\2\2\2p\u0350\3\2\2\2r\u0363\3\2\2\2t\u036d\3\2\2\2v\u036f")
buf.write("\3\2\2\2x\u0382\3\2\2\2z\u0384\3\2\2\2|\u038c\3\2\2\2")
buf.write("~\u0399\3\2\2\2\u0080\u039b\3\2\2\2\u0082\u03a0\3\2\2")
buf.write("\2\u0084\u03a6\3\2\2\2\u0086\u03b1\3\2\2\2\u0088\u03b4")
buf.write("\3\2\2\2\u008a\u03be\3\2\2\2\u008c\u03c0\3\2\2\2\u008e")
buf.write("\u03c9\3\2\2\2\u0090\u03cb\3\2\2\2\u0092\u03de\3\2\2\2")
buf.write("\u0094\u03e0\3\2\2\2\u0096\u03e7\3\2\2\2\u0098\u03ec\3")
buf.write("\2\2\2\u009a\u03ef\3\2\2\2\u009c\u0405\3\2\2\2\u009e\u041c")
buf.write("\3\2\2\2\u00a0\u041f\3\2\2\2\u00a2\u042a\3\2\2\2\u00a4")
buf.write("\u043a\3\2\2\2\u00a6\u044d\3\2\2\2\u00a8\u0453\3\2\2\2")
buf.write("\u00aa\u0457\3\2\2\2\u00ac\u045a\3\2\2\2\u00ae\u046f\3")
buf.write("\2\2\2\u00b0\u0477\3\2\2\2\u00b2\u047f\3\2\2\2\u00b4\u0490")
buf.write("\3\2\2\2\u00b6\u0496\3\2\2\2\u00b8\u049b\3\2\2\2\u00ba")
buf.write("\u049d\3\2\2\2\u00bc\u04b8\3\2\2\2\u00be\u04ba\3\2\2\2")
buf.write("\u00c0\u04bc\3\2\2\2\u00c2\u04c1\3\2\2\2\u00c4\u04c3\3")
buf.write("\2\2\2\u00c6\u04ca\3\2\2\2\u00c8\u04d5\3\2\2\2\u00ca\u04e2")
buf.write("\3\2\2\2\u00cc\u04eb\3\2\2\2\u00ce\u04f2\3\2\2\2\u00d0")
buf.write("\u050a\3\2\2\2\u00d2\u051e\3\2\2\2\u00d4\u0530\3\2\2\2")
buf.write("\u00d6\u0537\3\2\2\2\u00d8\u053f\3\2\2\2\u00da\u0549\3")
buf.write("\2\2\2\u00dc\u054c\3\2\2\2\u00de\u0560\3\2\2\2\u00e0\u0564")
buf.write("\3\2\2\2\u00e2\u0568\3\2\2\2\u00e4\u056b\3\2\2\2\u00e6")
buf.write("\u0579\3\2\2\2\u00e8\u057b\3\2\2\2\u00ea\u0584\3\2\2\2")
buf.write("\u00ec\u058f\3\2\2\2\u00ee\u059a\3\2\2\2\u00f0\u059f\3")
buf.write("\2\2\2\u00f2\u05a3\3\2\2\2\u00f4\u05a5\3\2\2\2\u00f6\u05a7")
buf.write("\3\2\2\2\u00f8\u05a9\3\2\2\2\u00fa\u05ab\3\2\2\2\u00fc")
buf.write("\u05b0\3\2\2\2\u00fe\u05b2\3\2\2\2\u0100\u05b4\3\2\2\2")
buf.write("\u0102\u05bd\3\2\2\2\u0104\u05bf\3\2\2\2\u0106\u05c1\3")
buf.write("\2\2\2\u0108\u05c9\3\2\2\2\u010a\u05cc\3\2\2\2\u010c\u05ce")
buf.write("\3\2\2\2\u010e\u05d0\3\2\2\2\u0110\u05d4\3\2\2\2\u0112")
buf.write("\u05d6\3\2\2\2\u0114\u05ea\3\2\2\2\u0116\u05ee\3\2\2\2")
buf.write("\u0118\u05f0\3\2\2\2\u011a\u05f7\3\2\2\2\u011c\u0609\3")
buf.write("\2\2\2\u011e\u060b\3\2\2\2\u0120\u0619\3\2\2\2\u0122\u0626")
buf.write("\3\2\2\2\u0124\u062c\3\2\2\2\u0126\u0631\3\2\2\2\u0128")
buf.write("\u0634\3\2\2\2\u012a\u063b\3\2\2\2\u012c\u077f\3\2\2\2")
buf.write("\u012e\u0781\3\2\2\2\u0130\u0793\3\2\2\2\u0132\u0795\3")
buf.write("\2\2\2\u0134\u07ab\3\2\2\2\u0136\u07b2\3\2\2\2\u0138\u07ba")
buf.write("\3\2\2\2\u013a\u07c0\3\2\2\2\u013c\u07c3\3\2\2\2\u013e")
buf.write("\u07ce\3\2\2\2\u0140\u07d6\3\2\2\2\u0142\u07de\3\2\2\2")
buf.write("\u0144\u07e3\3\2\2\2\u0146\u07e7\3\2\2\2\u0148\u07ef\3")
buf.write("\2\2\2\u014a\u07f8\3\2\2\2\u014c\u07ff\3\2\2\2\u014e\u0807")
buf.write("\3\2\2\2\u0150\u080a\3\2\2\2\u0152\u0814\3\2\2\2\u0154")
buf.write("\u0818\3\2\2\2\u0156\u081a\3\2\2\2\u0158\u081c\3\2\2\2")
buf.write("\u015a\u015b\5\4\3\2\u015b\u015c\5\6\4\2\u015c\u015d\5")
buf.write("\b\5\2\u015d\u015e\5\n\6\2\u015e\u015f\5\f\7\2\u015f\u0160")
buf.write("\5\16\b\2\u0160\u0162\5\20\t\2\u0161\u0163\5\22\n\2\u0162")
buf.write("\u0161\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u0164\3\2\2\2")
buf.write("\u0164\u0165\5\24\13\2\u0165\u0166\5\26\f\2\u0166\u0167")
buf.write("\5\34\17\2\u0167\u0168\5\36\20\2\u0168\u0169\5 \21\2\u0169")
buf.write("\3\3\2\2\2\u016a\u016b\7\\\2\2\u016b\u016d\7#\2\2\u016c")
buf.write("\u016e\5\u00fe\u0080\2\u016d\u016c\3\2\2\2\u016e\u016f")
buf.write("\3\2\2\2\u016f\u016d\3\2\2\2\u016f\u0170\3\2\2\2\u0170")
buf.write("\u0171\3\2\2\2\u0171\u0172\5\u00fa~\2\u0172\5\3\2\2\2")
buf.write("\u0173\u0174\7U\2\2\u0174\u0175\7#\2\2\u0175\u0176\5$")
buf.write("\23\2\u0176\u0177\5\u010c\u0087\2\u0177\u0178\5&\24\2")
buf.write("\u0178\u0179\5\u010c\u0087\2\u0179\u017a\5(\25\2\u017a")
buf.write("\u017b\5\u010c\u0087\2\u017b\u017c\5*\26\2\u017c\u017d")
buf.write("\5\u010c\u0087\2\u017d\u017e\5,\27\2\u017e\u017f\5\u010c")
buf.write("\u0087\2\u017f\u0180\5d\63\2\u0180\u0181\5\u00fa~\2\u0181")
buf.write("\7\3\2\2\2\u0182\u0183\7Y\2\2\u0183\u0184\7#\2\2\u0184")
buf.write("\u0185\5\u0080A\2\u0185\u0186\5\u00fa~\2\u0186\t\3\2\2")
buf.write("\2\u0187\u0188\7O\2\2\u0188\u0189\7#\2\2\u0189\u018a\5")
buf.write("\u0080A\2\u018a\u018b\5\u00fa~\2\u018b\u018d\3\2\2\2\u018c")
buf.write("\u0187\3\2\2\2\u018c\u018d\3\2\2\2\u018d\13\3\2\2\2\u018e")
buf.write("\u018f\7[\2\2\u018f\u0190\7#\2\2\u0190\u0191\5.\30\2\u0191")
buf.write("\u0192\5\u00fa~\2\u0192\u0194\3\2\2\2\u0193\u018e\3\2")
buf.write("\2\2\u0193\u0194\3\2\2\2\u0194\r\3\2\2\2\u0195\u0196\7")
buf.write("K\2\2\u0196\u0197\7#\2\2\u0197\u0198\5\60\31\2\u0198\u0199")
buf.write("\5\u00fa~\2\u0199\u019b\3\2\2\2\u019a\u0195\3\2\2\2\u019b")
buf.write("\u019e\3\2\2\2\u019c\u019a\3\2\2\2\u019c\u019d\3\2\2\2")
buf.write("\u019d\17\3\2\2\2\u019e\u019c\3\2\2\2\u019f\u01a0\7V\2")
buf.write("\2\u01a0\u01a1\7#\2\2\u01a1\u01a2\5\66\34\2\u01a2\u01a3")
buf.write("\5\u00fa~\2\u01a3\u01a5\3\2\2\2\u01a4\u019f\3\2\2\2\u01a5")
buf.write("\u01a8\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6\u01a7\3\2\2\2")
buf.write("\u01a7\21\3\2\2\2\u01a8\u01a6\3\2\2\2\u01a9\u01aa\7I\2")
buf.write("\2\u01aa\u01ab\7#\2\2\u01ab\u01ac\5*\26\2\u01ac\u01ad")
buf.write("\5\u010c\u0087\2\u01ad\u01ae\5,\27\2\u01ae\u01af\5\u010c")
buf.write("\u0087\2\u01af\u01b0\5:\36\2\u01b0\u01b1\5\u00fa~\2\u01b1")
buf.write("\23\3\2\2\2\u01b2\u01b3\7H\2\2\u01b3\u01b4\7#\2\2\u01b4")
buf.write("\u01b5\5<\37\2\u01b5\u01b6\7 \2\2\u01b6\u01b7\5> \2\u01b7")
buf.write("\u01b8\5\u00fa~\2\u01b8\u01ba\3\2\2\2\u01b9\u01b2\3\2")
buf.write("\2\2\u01ba\u01bd\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bb\u01bc")
buf.write("\3\2\2\2\u01bc\25\3\2\2\2\u01bd\u01bb\3\2\2\2\u01be\u01bf")
buf.write("\7Z\2\2\u01bf\u01c0\7#\2\2\u01c0\u01c1\5@!\2\u01c1\u01c2")
buf.write("\5\u010c\u0087\2\u01c2\u01c8\5B\"\2\u01c3\u01c4\5\u00fa")
buf.write("~\2\u01c4\u01c5\5\30\r\2\u01c5\u01c7\3\2\2\2\u01c6\u01c3")
buf.write("\3\2\2\2\u01c7\u01ca\3\2\2\2\u01c8\u01c6\3\2\2\2\u01c8")
buf.write("\u01c9\3\2\2\2\u01c9\u01cb\3\2\2\2\u01ca\u01c8\3\2\2\2")
buf.write("\u01cb\u01cc\5\u00fa~\2\u01cc\u01ce\3\2\2\2\u01cd\u01be")
buf.write("\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf\u01cd\3\2\2\2\u01cf")
buf.write("\u01d0\3\2\2\2\u01d0\u01d4\3\2\2\2\u01d1\u01d2\5\32\16")
buf.write("\2\u01d2\u01d3\5\u00fa~\2\u01d3\u01d5\3\2\2\2\u01d4\u01d1")
buf.write("\3\2\2\2\u01d4\u01d5\3\2\2\2\u01d5\27\3\2\2\2\u01d6\u01d7")
buf.write("\7X\2\2\u01d7\u01d8\7#\2\2\u01d8\u01d9\5F$\2\u01d9\u01da")
buf.write("\5\u010c\u0087\2\u01da\u01de\5H%\2\u01db\u01dc\5\u010c")
buf.write("\u0087\2\u01dc\u01dd\5H%\2\u01dd\u01df\3\2\2\2\u01de\u01db")
buf.write("\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0\u01de\3\2\2\2\u01e0")
buf.write("\u01e1\3\2\2\2\u01e1\31\3\2\2\2\u01e2\u01e3\7`\2\2\u01e3")
buf.write("\u01e4\7#\2\2\u01e4\u01e5\5D#\2\u01e5\u01e7\5\u010c\u0087")
buf.write("\2\u01e6\u01e8\7\23\2\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8")
buf.write("\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01f4\5H%\2\u01ea\u01eb")
buf.write("\5\u010c\u0087\2\u01eb\u01ec\5D#\2\u01ec\u01ee\5\u010c")
buf.write("\u0087\2\u01ed\u01ef\7\23\2\2\u01ee\u01ed\3\2\2\2\u01ee")
buf.write("\u01ef\3\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\5H%\2\u01f1")
buf.write("\u01f3\3\2\2\2\u01f2\u01ea\3\2\2\2\u01f3\u01f6\3\2\2\2")
buf.write("\u01f4\u01f2\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\33\3\2")
buf.write("\2\2\u01f6\u01f4\3\2\2\2\u01f7\u01f8\7Q\2\2\u01f8\u01f9")
buf.write("\7#\2\2\u01f9\u01fa\5L\'\2\u01fa\u01fb\5\u00fa~\2\u01fb")
buf.write("\u01fd\3\2\2\2\u01fc\u01f7\3\2\2\2\u01fc\u01fd\3\2\2\2")
buf.write("\u01fd\35\3\2\2\2\u01fe\u01ff\7G\2\2\u01ff\u0200\7#\2")
buf.write("\2\u0200\u0201\5V,\2\u0201\u0202\5\u00fa~\2\u0202\u0204")
buf.write("\3\2\2\2\u0203\u01fe\3\2\2\2\u0204\u0207\3\2\2\2\u0205")
buf.write("\u0203\3\2\2\2\u0205\u0206\3\2\2\2\u0206\37\3\2\2\2\u0207")
buf.write("\u0205\3\2\2\2\u0208\u0209\5\"\22\2\u0209\u020d\5\n\6")
buf.write("\2\u020a\u020c\5\22\n\2\u020b\u020a\3\2\2\2\u020c\u020f")
buf.write("\3\2\2\2\u020d\u020b\3\2\2\2\u020d\u020e\3\2\2\2\u020e")
buf.write("\u0210\3\2\2\2\u020f\u020d\3\2\2\2\u0210\u0211\5\24\13")
buf.write("\2\u0211\u0212\5\34\17\2\u0212\u0213\5\36\20\2\u0213\u0215")
buf.write("\3\2\2\2\u0214\u0208\3\2\2\2\u0215\u0218\3\2\2\2\u0216")
buf.write("\u0214\3\2\2\2\u0216\u0217\3\2\2\2\u0217!\3\2\2\2\u0218")
buf.write("\u0216\3\2\2\2\u0219\u021a\7S\2\2\u021a\u021b\7#\2\2\u021b")
buf.write("\u021c\5\\/\2\u021c\u021d\5\u010c\u0087\2\u021d\u0220")
buf.write("\5b\62\2\u021e\u021f\7\25\2\2\u021f\u0221\5\u008cG\2\u0220")
buf.write("\u021e\3\2\2\2\u0220\u0221\3\2\2\2\u0221\u0222\3\2\2\2")
buf.write("\u0222\u0223\5\u010c\u0087\2\u0223\u0227\5`\61\2\u0224")
buf.write("\u0225\5\u010c\u0087\2\u0225\u0226\5^\60\2\u0226\u0228")
buf.write("\3\2\2\2\u0227\u0224\3\2\2\2\u0228\u0229\3\2\2\2\u0229")
buf.write("\u0227\3\2\2\2\u0229\u022a\3\2\2\2\u022a\u022b\3\2\2\2")
buf.write("\u022b\u022c\5\u00fa~\2\u022c#\3\2\2\2\u022d\u022e\5\u0084")
buf.write("C\2\u022e%\3\2\2\2\u022f\u0231\5\u00fe\u0080\2\u0230\u022f")
buf.write("\3\2\2\2\u0231\u0232\3\2\2\2\u0232\u0230\3\2\2\2\u0232")
buf.write("\u0233\3\2\2\2\u0233\'\3\2\2\2\u0234\u0236\5\u00fe\u0080")
buf.write("\2\u0235\u0234\3\2\2\2\u0236\u0237\3\2\2\2\u0237\u0235")
buf.write("\3\2\2\2\u0237\u0238\3\2\2\2\u0238)\3\2\2\2\u0239\u023a")
buf.write("\5\u0088E\2\u023a+\3\2\2\2\u023b\u023c\5\u0088E\2\u023c")
buf.write("-\3\2\2\2\u023d\u023e\5\u0116\u008c\2\u023e/\3\2\2\2\u023f")
buf.write("\u0243\5\62\32\2\u0240\u0243\5\64\33\2\u0241\u0243\5\u0094")
buf.write("K\2\u0242\u023f\3\2\2\2\u0242\u0240\3\2\2\2\u0242\u0241")
buf.write("\3\2\2\2\u0243\61\3\2\2\2\u0244\u0246\5\u0094K\2\u0245")
buf.write("\u0247\5\u010c\u0087\2\u0246\u0245\3\2\2\2\u0247\u0248")
buf.write("\3\2\2\2\u0248\u0246\3\2\2\2\u0248\u0249\3\2\2\2\u0249")
buf.write("\u024a\3\2\2\2\u024a\u024c\7\16\2\2\u024b\u024d\5\u008a")
buf.write("F\2\u024c\u024b\3\2\2\2\u024d\u024e\3\2\2\2\u024e\u024c")
buf.write("\3\2\2\2\u024e\u024f\3\2\2\2\u024f\u0250\3\2\2\2\u0250")
buf.write("\u0251\7\17\2\2\u0251\63\3\2\2\2\u0252\u0254\5\u008aF")
buf.write("\2\u0253\u0252\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0253")
buf.write("\3\2\2\2\u0255\u0256\3\2\2\2\u0256\u0258\3\2\2\2\u0257")
buf.write("\u0259\5\u010c\u0087\2\u0258\u0257\3\2\2\2\u0259\u025a")
buf.write("\3\2\2\2\u025a\u0258\3\2\2\2\u025a\u025b\3\2\2\2\u025b")
buf.write("\u025c\3\2\2\2\u025c\u025d\7\"\2\2\u025d\u025e\5\u0094")
buf.write("K\2\u025e\u025f\7$\2\2\u025f\65\3\2\2\2\u0260\u0264\5")
buf.write("8\35\2\u0261\u0263\5\u010c\u0087\2\u0262\u0261\3\2\2\2")
buf.write("\u0263\u0266\3\2\2\2\u0264\u0262\3\2\2\2\u0264\u0265\3")
buf.write("\2\2\2\u0265\u0267\3\2\2\2\u0266\u0264\3\2\2\2\u0267\u0269")
buf.write("\7\16\2\2\u0268\u026a\5\u008aF\2\u0269\u0268\3\2\2\2\u026a")
buf.write("\u026b\3\2\2\2\u026b\u0269\3\2\2\2\u026b\u026c\3\2\2\2")
buf.write("\u026c\u026d\3\2\2\2\u026d\u026e\7\17\2\2\u026e\u027a")
buf.write("\3\2\2\2\u026f\u0271\5\u008aF\2\u0270\u026f\3\2\2\2\u0271")
buf.write("\u0272\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0273\3\2\2\2")
buf.write("\u0273\u0274\3\2\2\2\u0274\u0275\7\"\2\2\u0275\u0276\5")
buf.write("8\35\2\u0276\u0277\7$\2\2\u0277\u027a\3\2\2\2\u0278\u027a")
buf.write("\58\35\2\u0279\u0260\3\2\2\2\u0279\u0270\3\2\2\2\u0279")
buf.write("\u0278\3\2\2\2\u027a\67\3\2\2\2\u027b\u027d\7\21\2\2\u027c")
buf.write("\u027b\3\2\2\2\u027c\u027d\3\2\2\2\u027d\u027e\3\2\2\2")
buf.write("\u027e\u0282\5\u00fe\u0080\2\u027f\u0283\5\u010c\u0087")
buf.write("\2\u0280\u0283\7\23\2\2\u0281\u0283\5\u00fe\u0080\2\u0282")
buf.write("\u027f\3\2\2\2\u0282\u0280\3\2\2\2\u0282\u0281\3\2\2\2")
buf.write("\u0283\u0284\3\2\2\2\u0284\u0282\3\2\2\2\u0284\u0285\3")
buf.write("\2\2\2\u02859\3\2\2\2\u0286\u0289\5f\64\2\u0287\u0289")
buf.write("\5d\63\2\u0288\u0286\3\2\2\2\u0288\u0287\3\2\2\2\u0289")
buf.write(";\3\2\2\2\u028a\u028b\5\u0088E\2\u028b=\3\2\2\2\u028c")
buf.write("\u028e\5\u00fe\u0080\2\u028d\u028c\3\2\2\2\u028e\u028f")
buf.write("\3\2\2\2\u028f\u028d\3\2\2\2\u028f\u0290\3\2\2\2\u0290")
buf.write("?\3\2\2\2\u0291\u0294\5D#\2\u0292\u0294\7\26\2\2\u0293")
buf.write("\u0291\3\2\2\2\u0293\u0292\3\2\2\2\u0294A\3\2\2\2\u0295")
buf.write("\u0298\5D#\2\u0296\u0298\7\26\2\2\u0297\u0295\3\2\2\2")
buf.write("\u0297\u0296\3\2\2\2\u0298C\3\2\2\2\u0299\u029a\5\u0090")
buf.write("I\2\u029a\u029b\5\u00fe\u0080\2\u029b\u029c\5\u00fe\u0080")
buf.write("\2\u029c\u029d\5\u00fe\u0080\2\u029d\u029e\5\u00fe\u0080")
buf.write("\2\u029e\u029f\5\u00fe\u0080\2\u029f\u02a0\5\u00fe\u0080")
buf.write("\2\u02a0\u02a1\5\u00fe\u0080\2\u02a1\u02a3\5\u00fe\u0080")
buf.write("\2\u02a2\u02a4\5\u00fe\u0080\2\u02a3\u02a2\3\2\2\2\u02a4")
buf.write("\u02a5\3\2\2\2\u02a5\u02a3\3\2\2\2\u02a5\u02a6\3\2\2\2")
buf.write("\u02a6E\3\2\2\2\u02a7\u02ab\5\u0090I\2\u02a8\u02aa\5\u00fe")
buf.write("\u0080\2\u02a9\u02a8\3\2\2\2\u02aa\u02ad\3\2\2\2\u02ab")
buf.write("\u02a9\3\2\2\2\u02ab\u02ac\3\2\2\2\u02ac\u02af\3\2\2\2")
buf.write("\u02ad\u02ab\3\2\2\2\u02ae\u02b0\5J&\2\u02af\u02ae\3\2")
buf.write("\2\2\u02af\u02b0\3\2\2\2\u02b0G\3\2\2\2\u02b1\u02b3\5")
buf.write("\u00fe\u0080\2\u02b2\u02b1\3\2\2\2\u02b3\u02b4\3\2\2\2")
buf.write("\u02b4\u02b2\3\2\2\2\u02b4\u02b5\3\2\2\2\u02b5\u02b7\3")
buf.write("\2\2\2\u02b6\u02b8\5J&\2\u02b7\u02b6\3\2\2\2\u02b7\u02b8")
buf.write("\3\2\2\2\u02b8I\3\2\2\2\u02b9\u02ba\t\2\2\2\u02baK\3\2")
buf.write("\2\2\u02bb\u02bc\7V\2\2\u02bc\u02bd\7X\2\2\u02bd\u02be")
buf.write("\7U\2\2\u02be\u02bf\7S\2\2\u02bf\u02c0\7V\2\2\u02c0\u02d7")
buf.write("\7Z\2\2\u02c1\u02c2\7I\2\2\u02c2\u02c3\7R\2\2\u02c3\u02c4")
buf.write("\7K\2\2\u02c4\u02c5\7G\2\2\u02c5\u02c6\7X\2\2\u02c6\u02c7")
buf.write("\7 \2\2\u02c7\u02d7\5\u0080A\2\u02c8\u02c9\7H\2\2\u02c9")
buf.write("\u02ca\7G\2\2\u02ca\u02cb\7Y\2\2\u02cb\u02cc\7K\2\2\u02cc")
buf.write("\u02cd\7\34\2\2\u02cd\u02ce\7\32\2\2\u02ce\u02cf\7 \2")
buf.write("\2\u02cf\u02d0\3\2\2\2\u02d0\u02d7\5N(\2\u02d1\u02d2\7")
buf.write("[\2\2\u02d2\u02d3\7X\2\2\u02d3\u02d4\7O\2\2\u02d4\u02d5")
buf.write("\7 \2\2\u02d5\u02d7\5.\30\2\u02d6\u02bb\3\2\2\2\u02d6")
buf.write("\u02c1\3\2\2\2\u02d6\u02c8\3\2\2\2\u02d6\u02d1\3\2\2\2")
buf.write("\u02d7M\3\2\2\2\u02d8\u02da\5P)\2\u02d9\u02d8\3\2\2\2")
buf.write("\u02da\u02dd\3\2\2\2\u02db\u02d9\3\2\2\2\u02db\u02dc\3")
buf.write("\2\2\2\u02dc\u02df\3\2\2\2\u02dd\u02db\3\2\2\2\u02de\u02e0")
buf.write("\5R*\2\u02df\u02de\3\2\2\2\u02df\u02e0\3\2\2\2\u02e0O")
buf.write("\3\2\2\2\u02e1\u02e2\5T+\2\u02e2\u02e3\5T+\2\u02e3\u02e4")
buf.write("\5T+\2\u02e4\u02e5\5T+\2\u02e5Q\3\2\2\2\u02e6\u02e7\5")
buf.write("T+\2\u02e7\u02e8\5T+\2\u02e8\u02e9\3\2\2\2\u02e9\u02ea")
buf.write("\7#\2\2\u02ea\u02eb\7#\2\2\u02eb\u02f3\3\2\2\2\u02ec\u02ed")
buf.write("\5T+\2\u02ed\u02ee\5T+\2\u02ee\u02ef\5T+\2\u02ef\u02f0")
buf.write("\3\2\2\2\u02f0\u02f1\7#\2\2\u02f1\u02f3\3\2\2\2\u02f2")
buf.write("\u02e6\3\2\2\2\u02f2\u02ec\3\2\2\2\u02f3S\3\2\2\2\u02f4")
buf.write("\u02f9\5\u00f2z\2\u02f5\u02f9\5\u00fe\u0080\2\u02f6\u02f9")
buf.write("\7\21\2\2\u02f7\u02f9\7\25\2\2\u02f8\u02f4\3\2\2\2\u02f8")
buf.write("\u02f5\3\2\2\2\u02f8\u02f6\3\2\2\2\u02f8\u02f7\3\2\2\2")
buf.write("\u02f9U\3\2\2\2\u02fa\u02fb\5X-\2\u02fb\u02fc\7 \2\2\u02fc")
buf.write("\u02fd\5Z.\2\u02fd\u0300\3\2\2\2\u02fe\u0300\5X-\2\u02ff")
buf.write("\u02fa\3\2\2\2\u02ff\u02fe\3\2\2\2\u0300W\3\2\2\2\u0301")
buf.write("\u0302\5\u0088E\2\u0302Y\3\2\2\2\u0303\u0304\5\u0082B")
buf.write("\2\u0304[\3\2\2\2\u0305\u0306\5\u0088E\2\u0306]\3\2\2")
buf.write("\2\u0307\u0308\5\u0088E\2\u0308_\3\2\2\2\u0309\u030e\5")
buf.write("\u0088E\2\u030a\u030b\7\25\2\2\u030b\u030d\5\u0088E\2")
buf.write("\u030c\u030a\3\2\2\2\u030d\u0310\3\2\2\2\u030e\u030c\3")
buf.write("\2\2\2\u030e\u030f\3\2\2\2\u030fa\3\2\2\2\u0310\u030e")
buf.write("\3\2\2\2\u0311\u0313\5\u00fe\u0080\2\u0312\u0311\3\2\2")
buf.write("\2\u0313\u0314\3\2\2\2\u0314\u0312\3\2\2\2\u0314\u0315")
buf.write("\3\2\2\2\u0315c\3\2\2\2\u0316\u031b\5r:\2\u0317\u031b")
buf.write("\5v<\2\u0318\u031b\5p9\2\u0319\u031b\5~@\2\u031a\u0316")
buf.write("\3\2\2\2\u031a\u0317\3\2\2\2\u031a\u0318\3\2\2\2\u031a")
buf.write("\u0319\3\2\2\2\u031be\3\2\2\2\u031c\u0321\5h\65\2\u031d")
buf.write("\u0321\5l\67\2\u031e\u0321\5p9\2\u031f\u0321\5~@\2\u0320")
buf.write("\u031c\3\2\2\2\u0320\u031d\3\2\2\2\u0320\u031e\3\2\2\2")
buf.write("\u0320\u031f\3\2\2\2\u0321g\3\2\2\2\u0322\u0323\5j\66")
buf.write("\2\u0323\u0324\7\24\2\2\u0324\u0325\5\u0092J\2\u0325\u0326")
buf.write("\3\2\2\2\u0326\u0327\7\24\2\2\u0327\u0328\5\u0092J\2\u0328")
buf.write("\u0329\3\2\2\2\u0329\u032a\7\24\2\2\u032a\u032b\5\u0092")
buf.write("J\2\u032b\u032c\3\2\2\2\u032c\u032d\7\25\2\2\u032d\u0330")
buf.write("\5n8\2\u032e\u032f\7\25\2\2\u032f\u0331\5\u008cG\2\u0330")
buf.write("\u032e\3\2\2\2\u0330\u0331\3\2\2\2\u0331i\3\2\2\2\u0332")
buf.write("\u0333\7\30\2\2\u0333\u0334\7\30\2\2\u0334\u0335\3\2\2")
buf.write("\2\u0335\u033b\t\3\2\2\u0336\u0337\7\30\2\2\u0337\u0338")
buf.write("\7\31\2\2\u0338\u0339\3\2\2\2\u0339\u033b\5\u00fe\u0080")
buf.write("\2\u033a\u0332\3\2\2\2\u033a\u0336\3\2\2\2\u033bk\3\2")
buf.write("\2\2\u033c\u033f\5x=\2\u033d\u033e\7\25\2\2\u033e\u0340")
buf.write("\5\u008cG\2\u033f\u033d\3\2\2\2\u033f\u0340\3\2\2\2\u0340")
buf.write("m\3\2\2\2\u0341\u0348\5\u0090I\2\u0342\u0343\5\u00fe\u0080")
buf.write("\2\u0343\u0344\5\u00fe\u0080\2\u0344\u0349\3\2\2\2\u0345")
buf.write("\u0347\5\u00fe\u0080\2\u0346\u0345\3\2\2\2\u0346\u0347")
buf.write("\3\2\2\2\u0347\u0349\3\2\2\2\u0348\u0342\3\2\2\2\u0348")
buf.write("\u0346\3\2\2\2\u0349\u034c\3\2\2\2\u034a\u034c\7\26\2")
buf.write("\2\u034b\u0341\3\2\2\2\u034b\u034a\3\2\2\2\u034co\3\2")
buf.write("\2\2\u034d\u0351\5\u008eH\2\u034e\u0351\7\23\2\2\u034f")
buf.write("\u0351\7\24\2\2\u0350\u034d\3\2\2\2\u0350\u034e\3\2\2")
buf.write("\2\u0350\u034f\3\2\2\2\u0351\u0355\3\2\2\2\u0352\u0356")
buf.write("\5\u008eH\2\u0353\u0356\7\23\2\2\u0354\u0356\7\24\2\2")
buf.write("\u0355\u0352\3\2\2\2\u0355\u0353\3\2\2\2\u0355\u0354\3")
buf.write("\2\2\2\u0356\u035a\3\2\2\2\u0357\u035b\5\u008eH\2\u0358")
buf.write("\u035b\7\23\2\2\u0359\u035b\7\24\2\2\u035a\u0357\3\2\2")
buf.write("\2\u035a\u0358\3\2\2\2\u035a\u0359\3\2\2\2\u035b\u035f")
buf.write("\3\2\2\2\u035c\u0360\5\u008eH\2\u035d\u0360\7\23\2\2\u035e")
buf.write("\u0360\7\24\2\2\u035f\u035c\3\2\2\2\u035f\u035d\3\2\2")
buf.write("\2\u035f\u035e\3\2\2\2\u0360\u0361\3\2\2\2\u0361\u035f")
buf.write("\3\2\2\2\u0361\u0362\3\2\2\2\u0362q\3\2\2\2\u0363\u0364")
buf.write("\5t;\2\u0364\u0365\7\24\2\2\u0365\u0366\5\u0092J\2\u0366")
buf.write("\u0367\3\2\2\2\u0367\u0368\7\24\2\2\u0368\u0369\5\u0092")
buf.write("J\2\u0369\u036a\3\2\2\2\u036a\u036b\7\24\2\2\u036b\u036c")
buf.write("\5\u0092J\2\u036cs\3\2\2\2\u036d\u036e\5\u0092J\2\u036e")
buf.write("u\3\2\2\2\u036f\u0372\5x=\2\u0370\u0371\7 \2\2\u0371\u0373")
buf.write("\5r:\2\u0372\u0370\3\2\2\2\u0372\u0373\3\2\2\2\u0373w")
buf.write("\3\2\2\2\u0374\u0383\5z>\2\u0375\u0376\5z>\2\u0376\u0377")
buf.write("\7 \2\2\u0377\u0378\7 \2\2\u0378\u037a\3\2\2\2\u0379\u037b")
buf.write("\5z>\2\u037a\u0379\3\2\2\2\u037a\u037b\3\2\2\2\u037b\u0383")
buf.write("\3\2\2\2\u037c\u037d\7 \2\2\u037d\u037e\7 \2\2\u037e\u0380")
buf.write("\3\2\2\2\u037f\u0381\5z>\2\u0380\u037f\3\2\2\2\u0380\u0381")
buf.write("\3\2\2\2\u0381\u0383\3\2\2\2\u0382\u0374\3\2\2\2\u0382")
buf.write("\u0375\3\2\2\2\u0382\u037c\3\2\2\2\u0383y\3\2\2\2\u0384")
buf.write("\u0389\5|?\2\u0385\u0386\7 \2\2\u0386\u0388\5|?\2\u0387")
buf.write("\u0385\3\2\2\2\u0388\u038b\3\2\2\2\u0389\u0387\3\2\2\2")
buf.write("\u0389\u038a\3\2\2\2\u038a{\3\2\2\2\u038b\u0389\3\2\2")
buf.write("\2\u038c\u0397\5\u0102\u0082\2\u038d\u038e\5\u0102\u0082")
buf.write("\2\u038e\u038f\5\u0102\u0082\2\u038f\u0390\5\u0102\u0082")
buf.write("\2\u0390\u0398\3\2\2\2\u0391\u0392\5\u0102\u0082\2\u0392")
buf.write("\u0393\5\u0102\u0082\2\u0393\u0398\3\2\2\2\u0394\u0396")
buf.write("\5\u0102\u0082\2\u0395\u0394\3\2\2\2\u0395\u0396\3\2\2")
buf.write("\2\u0396\u0398\3\2\2\2\u0397\u038d\3\2\2\2\u0397\u0391")
buf.write("\3\2\2\2\u0397\u0395\3\2\2\2\u0398}\3\2\2\2\u0399\u039a")
buf.write("\5\u0084C\2\u039a\177\3\2\2\2\u039b\u039c\5\u0082B\2\u039c")
buf.write("\u0081\3\2\2\2\u039d\u03a1\t\4\2\2\u039e\u03a1\t\5\2\2")
buf.write("\u039f\u03a1\t\6\2\2\u03a0\u039d\3\2\2\2\u03a0\u039e\3")
buf.write("\2\2\2\u03a0\u039f\3\2\2\2\u03a1\u03a2\3\2\2\2\u03a2\u03a0")
buf.write("\3\2\2\2\u03a2\u03a3\3\2\2\2\u03a3\u0083\3\2\2\2\u03a4")
buf.write("\u03a7\5\u010e\u0088\2\u03a5\u03a7\t\7\2\2\u03a6\u03a4")
buf.write("\3\2\2\2\u03a6\u03a5\3\2\2\2\u03a7\u03a8\3\2\2\2\u03a8")
buf.write("\u03a6\3\2\2\2\u03a8\u03a9\3\2\2\2\u03a9\u0085\3\2\2\2")
buf.write("\u03aa\u03b2\7\7\2\2\u03ab\u03b2\t\b\2\2\u03ac\u03b2\t")
buf.write("\t\2\2\u03ad\u03b2\t\n\2\2\u03ae\u03b2\t\13\2\2\u03af")
buf.write("\u03b2\t\f\2\2\u03b0\u03b2\t\r\2\2\u03b1\u03aa\3\2\2\2")
buf.write("\u03b1\u03ab\3\2\2\2\u03b1\u03ac\3\2\2\2\u03b1\u03ad\3")
buf.write("\2\2\2\u03b1\u03ae\3\2\2\2\u03b1\u03af\3\2\2\2\u03b1\u03b0")
buf.write("\3\2\2\2\u03b2\u0087\3\2\2\2\u03b3\u03b5\5\u0086D\2\u03b4")
buf.write("\u03b3\3\2\2\2\u03b5\u03b6\3\2\2\2\u03b6\u03b4\3\2\2\2")
buf.write("\u03b6\u03b7\3\2\2\2\u03b7\u0089\3\2\2\2\u03b8\u03bf\t")
buf.write("\4\2\2\u03b9\u03bf\t\5\2\2\u03ba\u03bf\t\16\2\2\u03bb")
buf.write("\u03bf\t\17\2\2\u03bc\u03bf\7#\2\2\u03bd\u03bf\t\20\2")
buf.write("\2\u03be\u03b8\3\2\2\2\u03be\u03b9\3\2\2\2\u03be\u03ba")
buf.write("\3\2\2\2\u03be\u03bb\3\2\2\2\u03be\u03bc\3\2\2\2\u03be")
buf.write("\u03bd\3\2\2\2\u03bf\u008b\3\2\2\2\u03c0\u03c4\5\u0090")
buf.write("I\2\u03c1\u03c3\5\u00fe\u0080\2\u03c2\u03c1\3\2\2\2\u03c3")
buf.write("\u03c6\3\2\2\2\u03c4\u03c2\3\2\2\2\u03c4\u03c5\3\2\2\2")
buf.write("\u03c5\u008d\3\2\2\2\u03c6\u03c4\3\2\2\2\u03c7\u03ca\5")
buf.write("\u00f2z\2\u03c8\u03ca\5\u00fe\u0080\2\u03c9\u03c7\3\2")
buf.write("\2\2\u03c9\u03c8\3\2\2\2\u03ca\u008f\3\2\2\2\u03cb\u03cc")
buf.write("\t\21\2\2\u03cc\u0091\3\2\2\2\u03cd\u03df\5\u00fe\u0080")
buf.write("\2\u03ce\u03cf\5\u0090I\2\u03cf\u03d0\5\u00fe\u0080\2")
buf.write("\u03d0\u03df\3\2\2\2\u03d1\u03d2\7\27\2\2\u03d2\u03d4")
buf.write("\5\u00fe\u0080\2\u03d3\u03d5\5\u00fe\u0080\2\u03d4\u03d3")
buf.write("\3\2\2\2\u03d5\u03d6\3\2\2\2\u03d6\u03d4\3\2\2\2\u03d6")
buf.write("\u03d7\3\2\2\2\u03d7\u03df\3\2\2\2\u03d8\u03d9\7\30\2")
buf.write("\2\u03d9\u03da\t\22\2\2\u03da\u03df\5\u00fe\u0080\2\u03db")
buf.write("\u03dc\7\30\2\2\u03dc\u03dd\7\33\2\2\u03dd\u03df\t\23")
buf.write("\2\2\u03de\u03cd\3\2\2\2\u03de\u03ce\3\2\2\2\u03de\u03d1")
buf.write("\3\2\2\2\u03de\u03d8\3\2\2\2\u03de\u03db\3\2\2\2\u03df")
buf.write("\u0093\3\2\2\2\u03e0\u03e1\5\u0096L\2\u03e1\u03e2\7&\2")
buf.write("\2\u03e2\u03e3\5\u0098M\2\u03e3\u0095\3\2\2\2\u03e4\u03e8")
buf.write("\5\u00a4S\2\u03e5\u03e8\5\u00acW\2\u03e6\u03e8\5\u00d6")
buf.write("l\2\u03e7\u03e4\3\2\2\2\u03e7\u03e5\3\2\2\2\u03e7\u03e6")
buf.write("\3\2\2\2\u03e8\u0097\3\2\2\2\u03e9\u03ed\5\u00a4S\2\u03ea")
buf.write("\u03ed\5\u009aN\2\u03eb\u03ed\5\u00d8m\2\u03ec\u03e9\3")
buf.write("\2\2\2\u03ec\u03ea\3\2\2\2\u03ec\u03eb\3\2\2\2\u03ed\u0099")
buf.write("\3\2\2\2\u03ee\u03f0\5\u00bc_\2\u03ef\u03ee\3\2\2\2\u03ef")
buf.write("\u03f0\3\2\2\2\u03f0\u03f1\3\2\2\2\u03f1\u03f8\7A\2\2")
buf.write("\u03f2\u03f4\5\u00b4[\2\u03f3\u03f2\3\2\2\2\u03f3\u03f4")
buf.write("\3\2\2\2\u03f4\u03f5\3\2\2\2\u03f5\u03f7\5\u009cO\2\u03f6")
buf.write("\u03f3\3\2\2\2\u03f7\u03fa\3\2\2\2\u03f8\u03f6\3\2\2\2")
buf.write("\u03f8\u03f9\3\2\2\2\u03f9\u03fc\3\2\2\2\u03fa\u03f8\3")
buf.write("\2\2\2\u03fb\u03fd\5\u00b4[\2\u03fc\u03fb\3\2\2\2\u03fc")
buf.write("\u03fd\3\2\2\2\u03fd\u03fe\3\2\2\2\u03fe\u0400\7C\2\2")
buf.write("\u03ff\u0401\5\u00bc_\2\u0400\u03ff\3\2\2\2\u0400\u0401")
buf.write("\3\2\2\2\u0401\u009b\3\2\2\2\u0402\u0406\t\24\2\2\u0403")
buf.write("\u0406\t\r\2\2\u0404\u0406\5\u00dan\2\u0405\u0402\3\2")
buf.write("\2\2\u0405\u0403\3\2\2\2\u0405\u0404\3\2\2\2\u0406\u009d")
buf.write("\3\2\2\2\u0407\u041d\5\u00f2z\2\u0408\u041d\5\u00fe\u0080")
buf.write("\2\u0409\u041d\7\7\2\2\u040a\u041d\7\t\2\2\u040b\u041d")
buf.write("\7\n\2\2\u040c\u041d\7\13\2\2\u040d\u041d\7\f\2\2\u040e")
buf.write("\u041d\7\r\2\2\u040f\u041d\7\20\2\2\u0410\u041d\7\21\2")
buf.write("\2\u0411\u041d\7\23\2\2\u0412\u041d\7\25\2\2\u0413\u041d")
buf.write("\7#\2\2\u0414\u041d\7%\2\2\u0415\u041d\7D\2\2\u0416\u041d")
buf.write("\7E\2\2\u0417\u041d\7F\2\2\u0418\u041d\7a\2\2\u0419\u041d")
buf.write("\7b\2\2\u041a\u041d\7c\2\2\u041b\u041d\7d\2\2\u041c\u0407")
buf.write("\3\2\2\2\u041c\u0408\3\2\2\2\u041c\u0409\3\2\2\2\u041c")
buf.write("\u040a\3\2\2\2\u041c\u040b\3\2\2\2\u041c\u040c\3\2\2\2")
buf.write("\u041c\u040d\3\2\2\2\u041c\u040e\3\2\2\2\u041c\u040f\3")
buf.write("\2\2\2\u041c\u0410\3\2\2\2\u041c\u0411\3\2\2\2\u041c\u0412")
buf.write("\3\2\2\2\u041c\u0413\3\2\2\2\u041c\u0414\3\2\2\2\u041c")
buf.write("\u0415\3\2\2\2\u041c\u0416\3\2\2\2\u041c\u0417\3\2\2\2")
buf.write("\u041c\u0418\3\2\2\2\u041c\u0419\3\2\2\2\u041c\u041a\3")
buf.write("\2\2\2\u041c\u041b\3\2\2\2\u041d\u009f\3\2\2\2\u041e\u0420")
buf.write("\5\u00bc_\2\u041f\u041e\3\2\2\2\u041f\u0420\3\2\2\2\u0420")
buf.write("\u0422\3\2\2\2\u0421\u0423\5\u009eP\2\u0422\u0421\3\2")
buf.write("\2\2\u0423\u0424\3\2\2\2\u0424\u0422\3\2\2\2\u0424\u0425")
buf.write("\3\2\2\2\u0425\u0427\3\2\2\2\u0426\u0428\5\u00bc_\2\u0427")
buf.write("\u0426\3\2\2\2\u0427\u0428\3\2\2\2\u0428\u00a1\3\2\2\2")
buf.write("\u0429\u042b\5\u009eP\2\u042a\u0429\3\2\2\2\u042b\u042c")
buf.write("\3\2\2\2\u042c\u042a\3\2\2\2\u042c\u042d\3\2\2\2\u042d")
buf.write("\u0436\3\2\2\2\u042e\u0430\7\24\2\2\u042f\u0431\5\u009e")
buf.write("P\2\u0430\u042f\3\2\2\2\u0431\u0432\3\2\2\2\u0432\u0430")
buf.write("\3\2\2\2\u0432\u0433\3\2\2\2\u0433\u0435\3\2\2\2\u0434")
buf.write("\u042e\3\2\2\2\u0435\u0438\3\2\2\2\u0436\u0434\3\2\2\2")
buf.write("\u0436\u0437\3\2\2\2\u0437\u00a3\3\2\2\2\u0438\u0436\3")
buf.write("\2\2\2\u0439\u043b\5\u00bc_\2\u043a\u0439\3\2\2\2\u043a")
buf.write("\u043b\3\2\2\2\u043b\u043c\3\2\2\2\u043c\u043e\5\u00a2")
buf.write("R\2\u043d\u043f\5\u00bc_\2\u043e\u043d\3\2\2\2\u043e\u043f")
buf.write("\3\2\2\2\u043f\u00a5\3\2\2\2\u0440\u044e\7\16\2\2\u0441")
buf.write("\u044e\7\17\2\2\u0442\u044e\7\"\2\2\u0443\u044e\7$\2\2")
buf.write("\u0444\u044e\7A\2\2\u0445\u044e\7C\2\2\u0446\u044e\7 ")
buf.write("\2\2\u0447\u044e\7!\2\2\u0448\u044e\7&\2\2\u0449\u044e")
buf.write("\7B\2\2\u044a\u044e\7\22\2\2\u044b\u044e\7\24\2\2\u044c")
buf.write("\u044e\5\u0100\u0081\2\u044d\u0440\3\2\2\2\u044d\u0441")
buf.write("\3\2\2\2\u044d\u0442\3\2\2\2\u044d\u0443\3\2\2\2\u044d")
buf.write("\u0444\3\2\2\2\u044d\u0445\3\2\2\2\u044d\u0446\3\2\2\2")
buf.write("\u044d\u0447\3\2\2\2\u044d\u0448\3\2\2\2\u044d\u0449\3")
buf.write("\2\2\2\u044d\u044a\3\2\2\2\u044d\u044b\3\2\2\2\u044d\u044c")
buf.write("\3\2\2\2\u044e\u00a7\3\2\2\2\u044f\u0454\7\7\2\2\u0450")
buf.write("\u0454\t\25\2\2\u0451\u0454\t\26\2\2\u0452\u0454\5\u00c0")
buf.write("a\2\u0453\u044f\3\2\2\2\u0453\u0450\3\2\2\2\u0453\u0451")
buf.write("\3\2\2\2\u0453\u0452\3\2\2\2\u0454\u00a9\3\2\2\2\u0455")
buf.write("\u0458\5\u00a8U\2\u0456\u0458\5\u00b2Z\2\u0457\u0455\3")
buf.write("\2\2\2\u0457\u0456\3\2\2\2\u0458\u00ab\3\2\2\2\u0459\u045b")
buf.write("\5\u00bc_\2\u045a\u0459\3\2\2\2\u045a\u045b\3\2\2\2\u045b")
buf.write("\u045c\3\2\2\2\u045c\u0463\5\u0100\u0081\2\u045d\u045f")
buf.write("\5\u00b4[\2\u045e\u045d\3\2\2\2\u045e\u045f\3\2\2\2\u045f")
buf.write("\u0460\3\2\2\2\u0460\u0462\5\u00aaV\2\u0461\u045e\3\2")
buf.write("\2\2\u0462\u0465\3\2\2\2\u0463\u0461\3\2\2\2\u0463\u0464")
buf.write("\3\2\2\2\u0464\u0467\3\2\2\2\u0465\u0463\3\2\2\2\u0466")
buf.write("\u0468\5\u00b4[\2\u0467\u0466\3\2\2\2\u0467\u0468\3\2")
buf.write("\2\2\u0468\u0469\3\2\2\2\u0469\u046b\5\u0100\u0081\2\u046a")
buf.write("\u046c\5\u00bc_\2\u046b\u046a\3\2\2\2\u046b\u046c\3\2")
buf.write("\2\2\u046c\u00ad\3\2\2\2\u046d\u0470\5\u00a0Q\2\u046e")
buf.write("\u0470\5\u00acW\2\u046f\u046d\3\2\2\2\u046f\u046e\3\2")
buf.write("\2\2\u0470\u00af\3\2\2\2\u0471\u0473\5\u00aeX\2\u0472")
buf.write("\u0471\3\2\2\2\u0473\u0474\3\2\2\2\u0474\u0472\3\2\2\2")
buf.write("\u0474\u0475\3\2\2\2\u0475\u0478\3\2\2\2\u0476\u0478\5")
buf.write("\u00c6d\2\u0477\u0472\3\2\2\2\u0477\u0476\3\2\2\2\u0478")
buf.write("\u00b1\3\2\2\2\u0479\u047c\7B\2\2\u047a\u047d\5\u010e")
buf.write("\u0088\2\u047b\u047d\5\u0110\u0089\2\u047c\u047a\3\2\2")
buf.write("\2\u047c\u047b\3\2\2\2\u047d\u0480\3\2\2\2\u047e\u0480")
buf.write("\5\u00c4c\2\u047f\u0479\3\2\2\2\u047f\u047e\3\2\2\2\u0480")
buf.write("\u00b3\3\2\2\2\u0481\u0483\5\u0110\u0089\2\u0482\u0481")
buf.write("\3\2\2\2\u0483\u0486\3\2\2\2\u0484\u0482\3\2\2\2\u0484")
buf.write("\u0485\3\2\2\2\u0485\u0487\3\2\2\2\u0486\u0484\3\2\2\2")
buf.write("\u0487\u0489\5\u00fa~\2\u0488\u0484\3\2\2\2\u0488\u0489")
buf.write("\3\2\2\2\u0489\u048b\3\2\2\2\u048a\u048c\5\u0110\u0089")
buf.write("\2\u048b\u048a\3\2\2\2\u048c\u048d\3\2\2\2\u048d\u048b")
buf.write("\3\2\2\2\u048d\u048e\3\2\2\2\u048e\u0491\3\2\2\2\u048f")
buf.write("\u0491\5\u00dco\2\u0490\u0488\3\2\2\2\u0490\u048f\3\2")
buf.write("\2\2\u0491\u00b5\3\2\2\2\u0492\u0497\t\27\2\2\u0493\u0497")
buf.write("\t\30\2\2\u0494\u0497\t\26\2\2\u0495\u0497\5\u00be`\2")
buf.write("\u0496\u0492\3\2\2\2\u0496\u0493\3\2\2\2\u0496\u0494\3")
buf.write("\2\2\2\u0496\u0495\3\2\2\2\u0497\u00b7\3\2\2\2\u0498\u049c")
buf.write("\5\u00b6\\\2\u0499\u049c\5\u00b2Z\2\u049a\u049c\5\u00ba")
buf.write("^\2\u049b\u0498\3\2\2\2\u049b\u0499\3\2\2\2\u049b\u049a")
buf.write("\3\2\2\2\u049c\u00b9\3\2\2\2\u049d\u04a4\7\16\2\2\u049e")
buf.write("\u04a0\5\u00b4[\2\u049f\u049e\3\2\2\2\u049f\u04a0\3\2")
buf.write("\2\2\u04a0\u04a1\3\2\2\2\u04a1\u04a3\5\u00b8]\2\u04a2")
buf.write("\u049f\3\2\2\2\u04a3\u04a6\3\2\2\2\u04a4\u04a2\3\2\2\2")
buf.write("\u04a4\u04a5\3\2\2\2\u04a5\u04a8\3\2\2\2\u04a6\u04a4\3")
buf.write("\2\2\2\u04a7\u04a9\5\u00b4[\2\u04a8\u04a7\3\2\2\2\u04a8")
buf.write("\u04a9\3\2\2\2\u04a9\u04aa\3\2\2\2\u04aa\u04ab\7\17\2")
buf.write("\2\u04ab\u00bb\3\2\2\2\u04ac\u04ae\5\u00b4[\2\u04ad\u04ac")
buf.write("\3\2\2\2\u04ad\u04ae\3\2\2\2\u04ae\u04af\3\2\2\2\u04af")
buf.write("\u04b1\5\u00ba^\2\u04b0\u04ad\3\2\2\2\u04b1\u04b2\3\2")
buf.write("\2\2\u04b2\u04b0\3\2\2\2\u04b2\u04b3\3\2\2\2\u04b3\u04b5")
buf.write("\3\2\2\2\u04b4\u04b6\5\u00b4[\2\u04b5\u04b4\3\2\2\2\u04b5")
buf.write("\u04b6\3\2\2\2\u04b6\u04b9\3\2\2\2\u04b7\u04b9\5\u00b4")
buf.write("[\2\u04b8\u04b0\3\2\2\2\u04b8\u04b7\3\2\2\2\u04b9\u00bd")
buf.write("\3\2\2\2\u04ba\u04bb\5\u00dep\2\u04bb\u00bf\3\2\2\2\u04bc")
buf.write("\u04bd\5\u00dep\2\u04bd\u00c1\3\2\2\2\u04be\u04c2\7e\2")
buf.write("\2\u04bf\u04c2\5\u00dep\2\u04c0\u04c2\5\u010e\u0088\2")
buf.write("\u04c1\u04be\3\2\2\2\u04c1\u04bf\3\2\2\2\u04c1\u04c0\3")
buf.write("\2\2\2\u04c2\u00c3\3\2\2\2\u04c3\u04c8\7B\2\2\u04c4\u04c9")
buf.write("\7e\2\2\u04c5\u04c9\5\u00dep\2\u04c6\u04c9\5\u0106\u0084")
buf.write("\2\u04c7\u04c9\5\u00f8}\2\u04c8\u04c4\3\2\2\2\u04c8\u04c5")
buf.write("\3\2\2\2\u04c8\u04c6\3\2\2\2\u04c8\u04c7\3\2\2\2\u04c9")
buf.write("\u00c5\3\2\2\2\u04ca\u04d0\5\u00aeX\2\u04cb\u04cf\5\u00ae")
buf.write("X\2\u04cc\u04cf\7\24\2\2\u04cd\u04cf\5\u00bc_\2\u04ce")
buf.write("\u04cb\3\2\2\2\u04ce\u04cc\3\2\2\2\u04ce\u04cd\3\2\2\2")
buf.write("\u04cf\u04d2\3\2\2\2\u04d0\u04ce\3\2\2\2\u04d0\u04d1\3")
buf.write("\2\2\2\u04d1\u00c7\3\2\2\2\u04d2\u04d0\3\2\2\2\u04d3\u04d6")
buf.write("\5\u00b0Y\2\u04d4\u04d6\5\u00bc_\2\u04d5\u04d3\3\2\2\2")
buf.write("\u04d5\u04d4\3\2\2\2\u04d5\u04d6\3\2\2\2\u04d6\u04de\3")
buf.write("\2\2\2\u04d7\u04da\7\22\2\2\u04d8\u04db\5\u00b0Y\2\u04d9")
buf.write("\u04db\5\u00bc_\2\u04da\u04d8\3\2\2\2\u04da\u04d9\3\2")
buf.write("\2\2\u04da\u04db\3\2\2\2\u04db\u04dd\3\2\2\2\u04dc\u04d7")
buf.write("\3\2\2\2\u04dd\u04e0\3\2\2\2\u04de\u04dc\3\2\2\2\u04de")
buf.write("\u04df\3\2\2\2\u04df\u00c9\3\2\2\2\u04e0\u04de\3\2\2\2")
buf.write("\u04e1\u04e3\5\u00bc_\2\u04e2\u04e1\3\2\2\2\u04e2\u04e3")
buf.write("\3\2\2\2\u04e3\u04e4\3\2\2\2\u04e4\u04e5\7\"\2\2\u04e5")
buf.write("\u04e6\5\u00ccg\2\u04e6\u04e7\5\u0094K\2\u04e7\u04e9\7")
buf.write("$\2\2\u04e8\u04ea\5\u00bc_\2\u04e9\u04e8\3\2\2\2\u04e9")
buf.write("\u04ea\3\2\2\2\u04ea\u00cb\3\2\2\2\u04eb\u04ec\5\u00ce")
buf.write("h\2\u04ec\u04ed\7 \2\2\u04ed\u00cd\3\2\2\2\u04ee\u04f1")
buf.write("\5\u00bc_\2\u04ef\u04f1\7\22\2\2\u04f0\u04ee\3\2\2\2\u04f0")
buf.write("\u04ef\3\2\2\2\u04f1\u04f4\3\2\2\2\u04f2\u04f0\3\2\2\2")
buf.write("\u04f2\u04f3\3\2\2\2\u04f3\u04f5\3\2\2\2\u04f4\u04f2\3")
buf.write("\2\2\2\u04f5\u04f6\7&\2\2\u04f6\u0501\5\u0098M\2\u04f7")
buf.write("\u04f9\7\22\2\2\u04f8\u04fa\5\u00bc_\2\u04f9\u04f8\3\2")
buf.write("\2\2\u04f9\u04fa\3\2\2\2\u04fa\u04fd\3\2\2\2\u04fb\u04fc")
buf.write("\7&\2\2\u04fc\u04fe\5\u0098M\2\u04fd\u04fb\3\2\2\2\u04fd")
buf.write("\u04fe\3\2\2\2\u04fe\u0500\3\2\2\2\u04ff\u04f7\3\2\2\2")
buf.write("\u0500\u0503\3\2\2\2\u0501\u04ff\3\2\2\2\u0501\u0502\3")
buf.write("\2\2\2\u0502\u00cf\3\2\2\2\u0503\u0501\3\2\2\2\u0504\u0506")
buf.write("\5\u00bc_\2\u0505\u0504\3\2\2\2\u0505\u0506\3\2\2\2\u0506")
buf.write("\u0507\3\2\2\2\u0507\u0509\7\22\2\2\u0508\u0505\3\2\2")
buf.write("\2\u0509\u050c\3\2\2\2\u050a\u0508\3\2\2\2\u050a\u050b")
buf.write("\3\2\2\2\u050b\u050d\3\2\2\2\u050c\u050a\3\2\2\2\u050d")
buf.write("\u0515\5\u00e2r\2\u050e\u0511\7\22\2\2\u050f\u0512\5\u00e2")
buf.write("r\2\u0510\u0512\5\u00bc_\2\u0511\u050f\3\2\2\2\u0511\u0510")
buf.write("\3\2\2\2\u0511\u0512\3\2\2\2\u0512\u0514\3\2\2\2\u0513")
buf.write("\u050e\3\2\2\2\u0514\u0517\3\2\2\2\u0515\u0513\3\2\2\2")
buf.write("\u0515\u0516\3\2\2\2\u0516\u00d1\3\2\2\2\u0517\u0515\3")
buf.write("\2\2\2\u0518\u051a\5\u00bc_\2\u0519\u0518\3\2\2\2\u0519")
buf.write("\u051a\3\2\2\2\u051a\u051b\3\2\2\2\u051b\u051d\7\22\2")
buf.write("\2\u051c\u0519\3\2\2\2\u051d\u0520\3\2\2\2\u051e\u051c")
buf.write("\3\2\2\2\u051e\u051f\3\2\2\2\u051f\u0521\3\2\2\2\u0520")
buf.write("\u051e\3\2\2\2\u0521\u0529\5\u00e0q\2\u0522\u0525\7\22")
buf.write("\2\2\u0523\u0526\5\u00e0q\2\u0524\u0526\5\u00bc_\2\u0525")
buf.write("\u0523\3\2\2\2\u0525\u0524\3\2\2\2\u0525\u0526\3\2\2\2")
buf.write("\u0526\u0528\3\2\2\2\u0527\u0522\3\2\2\2\u0528\u052b\3")
buf.write("\2\2\2\u0529\u0527\3\2\2\2\u0529\u052a\3\2\2\2\u052a\u00d3")
buf.write("\3\2\2\2\u052b\u0529\3\2\2\2\u052c\u052e\5\u00bc_\2\u052d")
buf.write("\u052c\3\2\2\2\u052d\u052e\3\2\2\2\u052e\u052f\3\2\2\2")
buf.write("\u052f\u0531\7\22\2\2\u0530\u052d\3\2\2\2\u0531\u0532")
buf.write("\3\2\2\2\u0532\u0530\3\2\2\2\u0532\u0533\3\2\2\2\u0533")
buf.write("\u0535\3\2\2\2\u0534\u0536\5\u00bc_\2\u0535\u0534\3\2")
buf.write("\2\2\u0535\u0536\3\2\2\2\u0536\u00d5\3\2\2\2\u0537\u053c")
buf.write("\5\u00aeX\2\u0538\u0539\7\24\2\2\u0539\u053b\5\u00aeX")
buf.write("\2\u053a\u0538\3\2\2\2\u053b\u053e\3\2\2\2\u053c\u053a")
buf.write("\3\2\2\2\u053c\u053d\3\2\2\2\u053d\u00d7\3\2\2\2\u053e")
buf.write("\u053c\3\2\2\2\u053f\u0544\5\u00a0Q\2\u0540\u0541\7\24")
buf.write("\2\2\u0541\u0543\5\u00a0Q\2\u0542\u0540\3\2\2\2\u0543")
buf.write("\u0546\3\2\2\2\u0544\u0542\3\2\2\2\u0544\u0545\3\2\2\2")
buf.write("\u0545\u00d9\3\2\2\2\u0546\u0544\3\2\2\2\u0547\u054a\5")
buf.write("\u00dep\2\u0548\u054a\5\u00b2Z\2\u0549\u0547\3\2\2\2\u0549")
buf.write("\u0548\3\2\2\2\u054a\u00db\3\2\2\2\u054b\u054d\5\u0110")
buf.write("\u0089\2\u054c\u054b\3\2\2\2\u054d\u054e\3\2\2\2\u054e")
buf.write("\u054c\3\2\2\2\u054e\u054f\3\2\2\2\u054f\u0558\3\2\2\2")
buf.write("\u0550\u0552\5\u00fa~\2\u0551\u0553\5\u0110\u0089\2\u0552")
buf.write("\u0551\3\2\2\2\u0553\u0554\3\2\2\2\u0554\u0552\3\2\2\2")
buf.write("\u0554\u0555\3\2\2\2\u0555\u0557\3\2\2\2\u0556\u0550\3")
buf.write("\2\2\2\u0557\u055a\3\2\2\2\u0558\u0556\3\2\2\2\u0558\u0559")
buf.write("\3\2\2\2\u0559\u00dd\3\2\2\2\u055a\u0558\3\2\2\2\u055b")
buf.write("\u0561\t\31\2\2\u055c\u0561\7n\2\2\u055d\u0561\7o\2\2")
buf.write("\u055e\u0561\t\32\2\2\u055f\u0561\7\u0082\2\2\u0560\u055b")
buf.write("\3\2\2\2\u0560\u055c\3\2\2\2\u0560\u055d\3\2\2\2\u0560")
buf.write("\u055e\3\2\2\2\u0560\u055f\3\2\2\2\u0561\u00df\3\2\2\2")
buf.write("\u0562\u0565\5\u00e2r\2\u0563\u0565\5\u00e8u\2\u0564\u0562")
buf.write("\3\2\2\2\u0564\u0563\3\2\2\2\u0565\u00e1\3\2\2\2\u0566")
buf.write("\u0569\5\u00e4s\2\u0567\u0569\5\u0094K\2\u0568\u0566\3")
buf.write("\2\2\2\u0568\u0567\3\2\2\2\u0569\u00e3\3\2\2\2\u056a\u056c")
buf.write("\5\u00eav\2\u056b\u056a\3\2\2\2\u056b\u056c\3\2\2\2\u056c")
buf.write("\u056d\3\2\2\2\u056d\u056e\5\u00e6t\2\u056e\u00e5\3\2")
buf.write("\2\2\u056f\u0571\5\u00bc_\2\u0570\u056f\3\2\2\2\u0570")
buf.write("\u0571\3\2\2\2\u0571\u0572\3\2\2\2\u0572\u0573\7\"\2\2")
buf.write("\u0573\u0574\5\u0094K\2\u0574\u0576\7$\2\2\u0575\u0577")
buf.write("\5\u00bc_\2\u0576\u0575\3\2\2\2\u0576\u0577\3\2\2\2\u0577")
buf.write("\u057a\3\2\2\2\u0578\u057a\5\u00caf\2\u0579\u0570\3\2")
buf.write("\2\2\u0579\u0578\3\2\2\2\u057a\u00e7\3\2\2\2\u057b\u057c")
buf.write("\5\u00eav\2\u057c\u057e\7 \2\2\u057d\u057f\5\u00f0y\2")
buf.write("\u057e\u057d\3\2\2\2\u057e\u057f\3\2\2\2\u057f\u0580\3")
buf.write("\2\2\2\u0580\u0582\7!\2\2\u0581\u0583\5\u00bc_\2\u0582")
buf.write("\u0581\3\2\2\2\u0582\u0583\3\2\2\2\u0583\u00e9\3\2\2\2")
buf.write("\u0584\u0585\5\u00b0Y\2\u0585\u00eb\3\2\2\2\u0586\u058b")
buf.write("\5\u00e2r\2\u0587\u0588\7\22\2\2\u0588\u058a\5\u00e2r")
buf.write("\2\u0589\u0587\3\2\2\2\u058a\u058d\3\2\2\2\u058b\u0589")
buf.write("\3\2\2\2\u058b\u058c\3\2\2\2\u058c\u0590\3\2\2\2\u058d")
buf.write("\u058b\3\2\2\2\u058e\u0590\5\u00d0i\2\u058f\u0586\3\2")
buf.write("\2\2\u058f\u058e\3\2\2\2\u0590\u00ed\3\2\2\2\u0591\u0596")
buf.write("\5\u00e0q\2\u0592\u0593\7\22\2\2\u0593\u0595\5\u00e0q")
buf.write("\2\u0594\u0592\3\2\2\2\u0595\u0598\3\2\2\2\u0596\u0594")
buf.write("\3\2\2\2\u0596\u0597\3\2\2\2\u0597\u059b\3\2\2\2\u0598")
buf.write("\u0596\3\2\2\2\u0599\u059b\5\u00d2j\2\u059a\u0591\3\2")
buf.write("\2\2\u059a\u0599\3\2\2\2\u059b\u00ef\3\2\2\2\u059c\u05a0")
buf.write("\5\u00ecw\2\u059d\u05a0\5\u00bc_\2\u059e\u05a0\5\u00d4")
buf.write("k\2\u059f\u059c\3\2\2\2\u059f\u059d\3\2\2\2\u059f\u059e")
buf.write("\3\2\2\2\u05a0\u00f1\3\2\2\2\u05a1\u05a4\t\f\2\2\u05a2")
buf.write("\u05a4\t\33\2\2\u05a3\u05a1\3\2\2\2\u05a3\u05a2\3\2\2")
buf.write("\2\u05a4\u00f3\3\2\2\2\u05a5\u05a6\t\34\2\2\u05a6\u00f5")
buf.write("\3\2\2\2\u05a7\u05a8\t\35\2\2\u05a8\u00f7\3\2\2\2\u05a9")
buf.write("\u05aa\7\5\2\2\u05aa\u00f9\3\2\2\2\u05ab\u05ac\5\u00f8")
buf.write("}\2\u05ac\u05ad\5\u0106\u0084\2\u05ad\u00fb\3\2\2\2\u05ae")
buf.write("\u05b1\t\36\2\2\u05af\u05b1\7\u0082\2\2\u05b0\u05ae\3")
buf.write("\2\2\2\u05b0\u05af\3\2\2\2\u05b1\u00fd\3\2\2\2\u05b2\u05b3")
buf.write("\t\13\2\2\u05b3\u00ff\3\2\2\2\u05b4\u05b5\7\b\2\2\u05b5")
buf.write("\u0101\3\2\2\2\u05b6\u05be\5\u00fe\u0080\2\u05b7\u05be")
buf.write("\t\37\2\2\u05b8\u05be\t \2\2\u05b9\u05be\t!\2\2\u05ba")
buf.write("\u05be\t\"\2\2\u05bb\u05be\t#\2\2\u05bc\u05be\t$\2\2\u05bd")
buf.write("\u05b6\3\2\2\2\u05bd\u05b7\3\2\2\2\u05bd\u05b8\3\2\2\2")
buf.write("\u05bd\u05b9\3\2\2\2\u05bd\u05ba\3\2\2\2\u05bd\u05bb\3")
buf.write("\2\2\2\u05bd\u05bc\3\2\2\2\u05be\u0103\3\2\2\2\u05bf\u05c0")
buf.write("\7\3\2\2\u05c0\u0105\3\2\2\2\u05c1\u05c2\7\4\2\2\u05c2")
buf.write("\u0107\3\2\2\2\u05c3\u05c8\5\u0110\u0089\2\u05c4\u05c5")
buf.write("\5\u00fa~\2\u05c5\u05c6\5\u0110\u0089\2\u05c6\u05c8\3")
buf.write("\2\2\2\u05c7\u05c3\3\2\2\2\u05c7\u05c4\3\2\2\2\u05c8\u05cb")
buf.write("\3\2\2\2\u05c9\u05c7\3\2\2\2\u05c9\u05ca\3\2\2\2\u05ca")
buf.write("\u0109\3\2\2\2\u05cb\u05c9\3\2\2\2\u05cc\u05cd\t%\2\2")
buf.write("\u05cd\u010b\3\2\2\2\u05ce\u05cf\7\6\2\2\u05cf\u010d\3")
buf.write("\2\2\2\u05d0\u05d1\t&\2\2\u05d1\u010f\3\2\2\2\u05d2\u05d5")
buf.write("\5\u010c\u0087\2\u05d3\u05d5\5\u0104\u0083\2\u05d4\u05d2")
buf.write("\3\2\2\2\u05d4\u05d3\3\2\2\2\u05d5\u0111\3\2\2\2\u05d6")
buf.write("\u05d7\5\u011e\u0090\2\u05d7\u05d8\7 \2\2\u05d8\u05db")
buf.write("\5\u0114\u008b\2\u05d9\u05da\7%\2\2\u05da\u05dc\5\u014c")
buf.write("\u00a7\2\u05db\u05d9\3\2\2\2\u05db\u05dc\3\2\2\2\u05dc")
buf.write("\u05df\3\2\2\2\u05dd\u05de\7\t\2\2\u05de\u05e0\5\u014e")
buf.write("\u00a8\2\u05df\u05dd\3\2\2\2\u05df\u05e0\3\2\2\2\u05e0")
buf.write("\u0113\3\2\2\2\u05e1\u05e2\7\25\2\2\u05e2\u05e3\7\25\2")
buf.write("\2\u05e3\u05e4\3\2\2\2\u05e4\u05e5\5\u0120\u0091\2\u05e5")
buf.write("\u05e6\5\u013a\u009e\2\u05e6\u05eb\3\2\2\2\u05e7\u05eb")
buf.write("\5\u013c\u009f\2\u05e8\u05eb\5\u0140\u00a1\2\u05e9\u05eb")
buf.write("\5\u0142\u00a2\2\u05ea\u05e1\3\2\2\2\u05ea\u05e7\3\2\2")
buf.write("\2\u05ea\u05e8\3\2\2\2\u05ea\u05e9\3\2\2\2\u05eb\u0115")
buf.write("\3\2\2\2\u05ec\u05ef\5\u0112\u008a\2\u05ed\u05ef\5\u011a")
buf.write("\u008e\2\u05ee\u05ec\3\2\2\2\u05ee\u05ed\3\2\2\2\u05ef")
buf.write("\u0117\3\2\2\2\u05f0\u05f1\5\u011e\u0090\2\u05f1\u05f2")
buf.write("\7 \2\2\u05f2\u05f5\5\u0114\u008b\2\u05f3\u05f4\7%\2\2")
buf.write("\u05f4\u05f6\5\u014c\u00a7\2\u05f5\u05f3\3\2\2\2\u05f5")
buf.write("\u05f6\3\2\2\2\u05f6\u0119\3\2\2\2\u05f7\u05fa\5\u011c")
buf.write("\u008f\2\u05f8\u05f9\7%\2\2\u05f9\u05fb\5\u014c\u00a7")
buf.write("\2\u05fa\u05f8\3\2\2\2\u05fa\u05fb\3\2\2\2\u05fb\u05fe")
buf.write("\3\2\2\2\u05fc\u05fd\7\t\2\2\u05fd\u05ff\5\u014e\u00a8")
buf.write("\2\u05fe\u05fc\3\2\2\2\u05fe\u05ff\3\2\2\2\u05ff\u011b")
buf.write("\3\2\2\2\u0600\u0601\7\25\2\2\u0601\u0602\7\25\2\2\u0602")
buf.write("\u0603\3\2\2\2\u0603\u0604\5\u0120\u0091\2\u0604\u0605")
buf.write("\5\u013a\u009e\2\u0605\u060a\3\2\2\2\u0606\u060a\5\u013c")
buf.write("\u009f\2\u0607\u060a\5\u013e\u00a0\2\u0608\u060a\5\u0142")
buf.write("\u00a2\2\u0609\u0600\3\2\2\2\u0609\u0606\3\2\2\2\u0609")
buf.write("\u0607\3\2\2\2\u0609\u0608\3\2\2\2\u060a\u011d\3\2\2\2")
buf.write("\u060b\u0613\5\u00f2z\2\u060c\u0612\5\u00f2z\2\u060d\u0612")
buf.write("\5\u00fe\u0080\2\u060e\u0612\7\21\2\2\u060f\u0612\7\23")
buf.write("\2\2\u0610\u0612\7\24\2\2\u0611\u060c\3\2\2\2\u0611\u060d")
buf.write("\3\2\2\2\u0611\u060e\3\2\2\2\u0611\u060f\3\2\2\2\u0611")
buf.write("\u0610\3\2\2\2\u0612\u0615\3\2\2\2\u0613\u0611\3\2\2\2")
buf.write("\u0613\u0614\3\2\2\2\u0614\u011f\3\2\2\2\u0615\u0613\3")
buf.write("\2\2\2\u0616\u0617\5\u0122\u0092\2\u0617\u0618\7&\2\2")
buf.write("\u0618\u061a\3\2\2\2\u0619\u0616\3\2\2\2\u0619\u061a\3")
buf.write("\2\2\2\u061a\u061b\3\2\2\2\u061b\u061e\5\u0124\u0093\2")
buf.write("\u061c\u061d\7 \2\2\u061d\u061f\5\u0126\u0094\2\u061e")
buf.write("\u061c\3\2\2\2\u061e\u061f\3\2\2\2\u061f\u0121\3\2\2\2")
buf.write("\u0620\u0625\5\u0152\u00aa\2\u0621\u0625\5\u0150\u00a9")
buf.write("\2\u0622\u0625\5\u0158\u00ad\2\u0623\u0625\7 \2\2\u0624")
buf.write("\u0620\3\2\2\2\u0624\u0621\3\2\2\2\u0624\u0622\3\2\2\2")
buf.write("\u0624\u0623\3\2\2\2\u0625\u0628\3\2\2\2\u0626\u0624\3")
buf.write("\2\2\2\u0626\u0627\3\2\2\2\u0627\u0123\3\2\2\2\u0628\u0626")
buf.write("\3\2\2\2\u0629\u062d\5\u0128\u0095\2\u062a\u062d\5\u0132")
buf.write("\u009a\2\u062b\u062d\5\u0136\u009c\2\u062c\u0629\3\2\2")
buf.write("\2\u062c\u062a\3\2\2\2\u062c\u062b\3\2\2\2\u062d\u0125")
buf.write("\3\2\2\2\u062e\u0630\5\u00fe\u0080\2\u062f\u062e\3\2\2")
buf.write("\2\u0630\u0633\3\2\2\2\u0631\u062f\3\2\2\2\u0631\u0632")
buf.write("\3\2\2\2\u0632\u0127\3\2\2\2\u0633\u0631\3\2\2\2\u0634")
buf.write("\u0637\7A\2\2\u0635\u0638\5\u012c\u0097\2\u0636\u0638")
buf.write("\5\u012a\u0096\2\u0637\u0635\3\2\2\2\u0637\u0636\3\2\2")
buf.write("\2\u0638\u0639\3\2\2\2\u0639\u063a\7C\2\2\u063a\u0129")
buf.write("\3\2\2\2\u063b\u063d\t\'\2\2\u063c\u063e\5\u0102\u0082")
buf.write("\2\u063d\u063c\3\2\2\2\u063e\u063f\3\2\2\2\u063f\u063d")
buf.write("\3\2\2\2\u063f\u0640\3\2\2\2\u0640\u0641\3\2\2\2\u0641")
buf.write("\u0645\7\24\2\2\u0642\u0646\5\u0152\u00aa\2\u0643\u0646")
buf.write("\5\u0158\u00ad\2\u0644\u0646\7 \2\2\u0645\u0642\3\2\2")
buf.write("\2\u0645\u0643\3\2\2\2\u0645\u0644\3\2\2\2\u0646\u0647")
buf.write("\3\2\2\2\u0647\u0645\3\2\2\2\u0647\u0648\3\2\2\2\u0648")
buf.write("\u012b\3\2\2\2\u0649\u064a\5\u012e\u0098\2\u064a\u064b")
buf.write("\7 \2\2\u064b\u064c\3\2\2\2\u064c\u064d\5\u012e\u0098")
buf.write("\2\u064d\u064e\7 \2\2\u064e\u064f\3\2\2\2\u064f\u0650")
buf.write("\5\u012e\u0098\2\u0650\u0651\7 \2\2\u0651\u0652\3\2\2")
buf.write("\2\u0652\u0653\5\u012e\u0098\2\u0653\u0654\7 \2\2\u0654")
buf.write("\u0655\3\2\2\2\u0655\u0656\5\u012e\u0098\2\u0656\u0657")
buf.write("\7 \2\2\u0657\u0658\3\2\2\2\u0658\u0659\5\u012e\u0098")
buf.write("\2\u0659\u065a\7 \2\2\u065a\u065b\3\2\2\2\u065b\u065c")
buf.write("\5\u0130\u0099\2\u065c\u0780\3\2\2\2\u065d\u065e\7 \2")
buf.write("\2\u065e\u065f\7 \2\2\u065f\u0660\3\2\2\2\u0660\u0661")
buf.write("\5\u012e\u0098\2\u0661\u0662\7 \2\2\u0662\u0663\3\2\2")
buf.write("\2\u0663\u0664\5\u012e\u0098\2\u0664\u0665\7 \2\2\u0665")
buf.write("\u0666\3\2\2\2\u0666\u0667\5\u012e\u0098\2\u0667\u0668")
buf.write("\7 \2\2\u0668\u0669\3\2\2\2\u0669\u066a\5\u012e\u0098")
buf.write("\2\u066a\u066b\7 \2\2\u066b\u066c\3\2\2\2\u066c\u066d")
buf.write("\5\u012e\u0098\2\u066d\u066e\7 \2\2\u066e\u066f\3\2\2")
buf.write("\2\u066f\u0670\5\u0130\u0099\2\u0670\u0780\3\2\2\2\u0671")
buf.write("\u0673\5\u012e\u0098\2\u0672\u0671\3\2\2\2\u0672\u0673")
buf.write("\3\2\2\2\u0673\u0674\3\2\2\2\u0674\u0675\7 \2\2\u0675")
buf.write("\u0676\7 \2\2\u0676\u0677\3\2\2\2\u0677\u0678\5\u012e")
buf.write("\u0098\2\u0678\u0679\7 \2\2\u0679\u067a\3\2\2\2\u067a")
buf.write("\u067b\5\u012e\u0098\2\u067b\u067c\7 \2\2\u067c\u067d")
buf.write("\3\2\2\2\u067d\u067e\5\u012e\u0098\2\u067e\u067f\7 \2")
buf.write("\2\u067f\u0680\3\2\2\2\u0680\u0681\5\u012e\u0098\2\u0681")
buf.write("\u0682\7 \2\2\u0682\u0683\3\2\2\2\u0683\u0684\5\u0130")
buf.write("\u0099\2\u0684\u0780\3\2\2\2\u0685\u0686\5\u012e\u0098")
buf.write("\2\u0686\u0687\7 \2\2\u0687\u0689\3\2\2\2\u0688\u0685")
buf.write("\3\2\2\2\u0688\u0689\3\2\2\2\u0689\u068a\3\2\2\2\u068a")
buf.write("\u068c\5\u012e\u0098\2\u068b\u0688\3\2\2\2\u068b\u068c")
buf.write("\3\2\2\2\u068c\u068d\3\2\2\2\u068d\u068e\7 \2\2\u068e")
buf.write("\u068f\7 \2\2\u068f\u0690\3\2\2\2\u0690\u0691\5\u012e")
buf.write("\u0098\2\u0691\u0692\7 \2\2\u0692\u0693\3\2\2\2\u0693")
buf.write("\u0694\5\u012e\u0098\2\u0694\u0695\7 \2\2\u0695\u0696")
buf.write("\3\2\2\2\u0696\u0697\5\u012e\u0098\2\u0697\u0698\7 \2")
buf.write("\2\u0698\u0699\3\2\2\2\u0699\u069a\5\u0130\u0099\2\u069a")
buf.write("\u0780\3\2\2\2\u069b\u069c\5\u012e\u0098\2\u069c\u069d")
buf.write("\7 \2\2\u069d\u069e\3\2\2\2\u069e\u069f\5\u012e\u0098")
buf.write("\2\u069f\u06a0\7 \2\2\u06a0\u06a7\3\2\2\2\u06a1\u06a2")
buf.write("\5\u012e\u0098\2\u06a2\u06a3\7 \2\2\u06a3\u06a5\3\2\2")
buf.write("\2\u06a4\u06a1\3\2\2\2\u06a4\u06a5\3\2\2\2\u06a5\u06a7")
buf.write("\3\2\2\2\u06a6\u069b\3\2\2\2\u06a6\u06a4\3\2\2\2\u06a7")
buf.write("\u06a8\3\2\2\2\u06a8\u06aa\5\u012e\u0098\2\u06a9\u06a6")
buf.write("\3\2\2\2\u06a9\u06aa\3\2\2\2\u06aa\u06ab\3\2\2\2\u06ab")
buf.write("\u06ac\7 \2\2\u06ac\u06ad\7 \2\2\u06ad\u06ae\3\2\2\2\u06ae")
buf.write("\u06af\5\u012e\u0098\2\u06af\u06b0\7 \2\2\u06b0\u06b1")
buf.write("\3\2\2\2\u06b1\u06b2\5\u012e\u0098\2\u06b2\u06b3\7 \2")
buf.write("\2\u06b3\u06b4\3\2\2\2\u06b4\u06b5\5\u0130\u0099\2\u06b5")
buf.write("\u0780\3\2\2\2\u06b6\u06b7\5\u012e\u0098\2\u06b7\u06b8")
buf.write("\7 \2\2\u06b8\u06b9\3\2\2\2\u06b9\u06ba\5\u012e\u0098")
buf.write("\2\u06ba\u06bb\7 \2\2\u06bb\u06bc\3\2\2\2\u06bc\u06bd")
buf.write("\5\u012e\u0098\2\u06bd\u06be\7 \2\2\u06be\u06cb\3\2\2")
buf.write("\2\u06bf\u06c0\5\u012e\u0098\2\u06c0\u06c1\7 \2\2\u06c1")
buf.write("\u06c2\3\2\2\2\u06c2\u06c3\5\u012e\u0098\2\u06c3\u06c4")
buf.write("\7 \2\2\u06c4\u06cb\3\2\2\2\u06c5\u06c6\5\u012e\u0098")
buf.write("\2\u06c6\u06c7\7 \2\2\u06c7\u06c9\3\2\2\2\u06c8\u06c5")
buf.write("\3\2\2\2\u06c8\u06c9\3\2\2\2\u06c9\u06cb\3\2\2\2\u06ca")
buf.write("\u06b6\3\2\2\2\u06ca\u06bf\3\2\2\2\u06ca\u06c8\3\2\2\2")
buf.write("\u06cb\u06cc\3\2\2\2\u06cc\u06ce\5\u012e\u0098\2\u06cd")
buf.write("\u06ca\3\2\2\2\u06cd\u06ce\3\2\2\2\u06ce\u06cf\3\2\2\2")
buf.write("\u06cf\u06d0\7 \2\2\u06d0\u06d1\7 \2\2\u06d1\u06d2\3\2")
buf.write("\2\2\u06d2\u06d3\5\u012e\u0098\2\u06d3\u06d4\7 \2\2\u06d4")
buf.write("\u06d5\5\u0130\u0099\2\u06d5\u0780\3\2\2\2\u06d6\u06d7")
buf.write("\5\u012e\u0098\2\u06d7\u06d8\7 \2\2\u06d8\u06d9\3\2\2")
buf.write("\2\u06d9\u06da\5\u012e\u0098\2\u06da\u06db\7 \2\2\u06db")
buf.write("\u06dc\3\2\2\2\u06dc\u06dd\5\u012e\u0098\2\u06dd\u06de")
buf.write("\7 \2\2\u06de\u06df\3\2\2\2\u06df\u06e0\5\u012e\u0098")
buf.write("\2\u06e0\u06e1\7 \2\2\u06e1\u06f7\3\2\2\2\u06e2\u06e3")
buf.write("\5\u012e\u0098\2\u06e3\u06e4\7 \2\2\u06e4\u06e5\3\2\2")
buf.write("\2\u06e5\u06e6\5\u012e\u0098\2\u06e6\u06e7\7 \2\2\u06e7")
buf.write("\u06e8\3\2\2\2\u06e8\u06e9\5\u012e\u0098\2\u06e9\u06ea")
buf.write("\7 \2\2\u06ea\u06f7\3\2\2\2\u06eb\u06ec\5\u012e\u0098")
buf.write("\2\u06ec\u06ed\7 \2\2\u06ed\u06ee\3\2\2\2\u06ee\u06ef")
buf.write("\5\u012e\u0098\2\u06ef\u06f0\7 \2\2\u06f0\u06f7\3\2\2")
buf.write("\2\u06f1\u06f2\5\u012e\u0098\2\u06f2\u06f3\7 \2\2\u06f3")
buf.write("\u06f5\3\2\2\2\u06f4\u06f1\3\2\2\2\u06f4\u06f5\3\2\2\2")
buf.write("\u06f5\u06f7\3\2\2\2\u06f6\u06d6\3\2\2\2\u06f6\u06e2\3")
buf.write("\2\2\2\u06f6\u06eb\3\2\2\2\u06f6\u06f4\3\2\2\2\u06f7\u06f8")
buf.write("\3\2\2\2\u06f8\u06fa\5\u012e\u0098\2\u06f9\u06f6\3\2\2")
buf.write("\2\u06f9\u06fa\3\2\2\2\u06fa\u06fb\3\2\2\2\u06fb\u06fc")
buf.write("\7 \2\2\u06fc\u06fd\7 \2\2\u06fd\u06fe\3\2\2\2\u06fe\u0780")
buf.write("\5\u0130\u0099\2\u06ff\u0700\5\u012e\u0098\2\u0700\u0701")
buf.write("\7 \2\2\u0701\u0702\3\2\2\2\u0702\u0703\5\u012e\u0098")
buf.write("\2\u0703\u0704\7 \2\2\u0704\u0705\3\2\2\2\u0705\u0706")
buf.write("\5\u012e\u0098\2\u0706\u0707\7 \2\2\u0707\u0708\3\2\2")
buf.write("\2\u0708\u0709\5\u012e\u0098\2\u0709\u070a\7 \2\2\u070a")
buf.write("\u070b\3\2\2\2\u070b\u070c\5\u012e\u0098\2\u070c\u070d")
buf.write("\7 \2\2\u070d\u072f\3\2\2\2\u070e\u070f\5\u012e\u0098")
buf.write("\2\u070f\u0710\7 \2\2\u0710\u0711\3\2\2\2\u0711\u0712")
buf.write("\5\u012e\u0098\2\u0712\u0713\7 \2\2\u0713\u0714\3\2\2")
buf.write("\2\u0714\u0715\5\u012e\u0098\2\u0715\u0716\7 \2\2\u0716")
buf.write("\u0717\3\2\2\2\u0717\u0718\5\u012e\u0098\2\u0718\u0719")
buf.write("\7 \2\2\u0719\u072f\3\2\2\2\u071a\u071b\5\u012e\u0098")
buf.write("\2\u071b\u071c\7 \2\2\u071c\u071d\3\2\2\2\u071d\u071e")
buf.write("\5\u012e\u0098\2\u071e\u071f\7 \2\2\u071f\u0720\3\2\2")
buf.write("\2\u0720\u0721\5\u012e\u0098\2\u0721\u0722\7 \2\2\u0722")
buf.write("\u072f\3\2\2\2\u0723\u0724\5\u012e\u0098\2\u0724\u0725")
buf.write("\7 \2\2\u0725\u0726\3\2\2\2\u0726\u0727\5\u012e\u0098")
buf.write("\2\u0727\u0728\7 \2\2\u0728\u072f\3\2\2\2\u0729\u072a")
buf.write("\5\u012e\u0098\2\u072a\u072b\7 \2\2\u072b\u072d\3\2\2")
buf.write("\2\u072c\u0729\3\2\2\2\u072c\u072d\3\2\2\2\u072d\u072f")
buf.write("\3\2\2\2\u072e\u06ff\3\2\2\2\u072e\u070e\3\2\2\2\u072e")
buf.write("\u071a\3\2\2\2\u072e\u0723\3\2\2\2\u072e\u072c\3\2\2\2")
buf.write("\u072f\u0730\3\2\2\2\u0730\u0732\5\u012e\u0098\2\u0731")
buf.write("\u072e\3\2\2\2\u0731\u0732\3\2\2\2\u0732\u0733\3\2\2\2")
buf.write("\u0733\u0734\7 \2\2\u0734\u0735\7 \2\2\u0735\u0736\3\2")
buf.write("\2\2\u0736\u0780\5\u012e\u0098\2\u0737\u0738\5\u012e\u0098")
buf.write("\2\u0738\u0739\7 \2\2\u0739\u073a\3\2\2\2\u073a\u073b")
buf.write("\5\u012e\u0098\2\u073b\u073c\7 \2\2\u073c\u073d\3\2\2")
buf.write("\2\u073d\u073e\5\u012e\u0098\2\u073e\u073f\7 \2\2\u073f")
buf.write("\u0740\3\2\2\2\u0740\u0741\5\u012e\u0098\2\u0741\u0742")
buf.write("\7 \2\2\u0742\u0743\3\2\2\2\u0743\u0744\5\u012e\u0098")
buf.write("\2\u0744\u0745\7 \2\2\u0745\u0746\3\2\2\2\u0746\u0747")
buf.write("\5\u012e\u0098\2\u0747\u0748\7 \2\2\u0748\u0779\3\2\2")
buf.write("\2\u0749\u074a\5\u012e\u0098\2\u074a\u074b\7 \2\2\u074b")
buf.write("\u074c\3\2\2\2\u074c\u074d\5\u012e\u0098\2\u074d\u074e")
buf.write("\7 \2\2\u074e\u074f\3\2\2\2\u074f\u0750\5\u012e\u0098")
buf.write("\2\u0750\u0751\7 \2\2\u0751\u0752\3\2\2\2\u0752\u0753")
buf.write("\5\u012e\u0098\2\u0753\u0754\7 \2\2\u0754\u0755\3\2\2")
buf.write("\2\u0755\u0756\5\u012e\u0098\2\u0756\u0757\7 \2\2\u0757")
buf.write("\u0779\3\2\2\2\u0758\u0759\5\u012e\u0098\2\u0759\u075a")
buf.write("\7 \2\2\u075a\u075b\3\2\2\2\u075b\u075c\5\u012e\u0098")
buf.write("\2\u075c\u075d\7 \2\2\u075d\u075e\3\2\2\2\u075e\u075f")
buf.write("\5\u012e\u0098\2\u075f\u0760\7 \2\2\u0760\u0761\3\2\2")
buf.write("\2\u0761\u0762\5\u012e\u0098\2\u0762\u0763\7 \2\2\u0763")
buf.write("\u0779\3\2\2\2\u0764\u0765\5\u012e\u0098\2\u0765\u0766")
buf.write("\7 \2\2\u0766\u0767\3\2\2\2\u0767\u0768\5\u012e\u0098")
buf.write("\2\u0768\u0769\7 \2\2\u0769\u076a\3\2\2\2\u076a\u076b")
buf.write("\5\u012e\u0098\2\u076b\u076c\7 \2\2\u076c\u0779\3\2\2")
buf.write("\2\u076d\u076e\5\u012e\u0098\2\u076e\u076f\7 \2\2\u076f")
buf.write("\u0770\3\2\2\2\u0770\u0771\5\u012e\u0098\2\u0771\u0772")
buf.write("\7 \2\2\u0772\u0779\3\2\2\2\u0773\u0774\5\u012e\u0098")
buf.write("\2\u0774\u0775\7 \2\2\u0775\u0777\3\2\2\2\u0776\u0773")
buf.write("\3\2\2\2\u0776\u0777\3\2\2\2\u0777\u0779\3\2\2\2\u0778")
buf.write("\u0737\3\2\2\2\u0778\u0749\3\2\2\2\u0778\u0758\3\2\2\2")
buf.write("\u0778\u0764\3\2\2\2\u0778\u076d\3\2\2\2\u0778\u0776\3")
buf.write("\2\2\2\u0779\u077a\3\2\2\2\u077a\u077c\5\u012e\u0098\2")
buf.write("\u077b\u0778\3\2\2\2\u077b\u077c\3\2\2\2\u077c\u077d\3")
buf.write("\2\2\2\u077d\u077e\7 \2\2\u077e\u0780\7 \2\2\u077f\u0649")
buf.write("\3\2\2\2\u077f\u065d\3\2\2\2\u077f\u0672\3\2\2\2\u077f")
buf.write("\u068b\3\2\2\2\u077f\u06a9\3\2\2\2\u077f\u06cd\3\2\2\2")
buf.write("\u077f\u06f9\3\2\2\2\u077f\u0731\3\2\2\2\u077f\u077b\3")
buf.write("\2\2\2\u0780\u012d\3\2\2\2\u0781\u078c\5\u0102\u0082\2")
buf.write("\u0782\u0783\5\u0102\u0082\2\u0783\u0784\5\u0102\u0082")
buf.write("\2\u0784\u0785\5\u0102\u0082\2\u0785\u078d\3\2\2\2\u0786")
buf.write("\u0787\5\u0102\u0082\2\u0787\u0788\5\u0102\u0082\2\u0788")
buf.write("\u078d\3\2\2\2\u0789\u078b\5\u0102\u0082\2\u078a\u0789")
buf.write("\3\2\2\2\u078a\u078b\3\2\2\2\u078b\u078d\3\2\2\2\u078c")
buf.write("\u0782\3\2\2\2\u078c\u0786\3\2\2\2\u078c\u078a\3\2\2\2")
buf.write("\u078d\u012f\3\2\2\2\u078e\u078f\5\u012e\u0098\2\u078f")
buf.write("\u0790\7 \2\2\u0790\u0791\5\u012e\u0098\2\u0791\u0794")
buf.write("\3\2\2\2\u0792\u0794\5\u0132\u009a\2\u0793\u078e\3\2\2")
buf.write("\2\u0793\u0792\3\2\2\2\u0794\u0131\3\2\2\2\u0795\u0796")
buf.write("\5\u0134\u009b\2\u0796\u0797\7\24\2\2\u0797\u0798\5\u0134")
buf.write("\u009b\2\u0798\u0799\7\24\2\2\u0799\u079a\5\u0134\u009b")
buf.write("\2\u079a\u079b\7\24\2\2\u079b\u079c\5\u0134\u009b\2\u079c")
buf.write("\u0133\3\2\2\2\u079d\u07ac\5\u00fe\u0080\2\u079e\u079f")
buf.write("\t\21\2\2\u079f\u07ac\5\u00fe\u0080\2\u07a0\u07a1\7\27")
buf.write("\2\2\u07a1\u07a2\5\u00fe\u0080\2\u07a2\u07a3\5\u00fe\u0080")
buf.write("\2\u07a3\u07ac\3\2\2\2\u07a4\u07a5\7\30\2\2\u07a5\u07a6")
buf.write("\t\22\2\2\u07a6\u07ac\5\u00fe\u0080\2\u07a7\u07a8\7\30")
buf.write("\2\2\u07a8\u07a9\7\33\2\2\u07a9\u07aa\3\2\2\2\u07aa\u07ac")
buf.write("\t\23\2\2\u07ab\u079d\3\2\2\2\u07ab\u079e\3\2\2\2\u07ab")
buf.write("\u07a0\3\2\2\2\u07ab\u07a4\3\2\2\2\u07ab\u07a7\3\2\2\2")
buf.write("\u07ac\u0135\3\2\2\2\u07ad\u07b1\5\u0152\u00aa\2\u07ae")
buf.write("\u07b1\5\u0150\u00a9\2\u07af\u07b1\5\u0158\u00ad\2\u07b0")
buf.write("\u07ad\3\2\2\2\u07b0\u07ae\3\2\2\2\u07b0\u07af\3\2\2\2")
buf.write("\u07b1\u07b4\3\2\2\2\u07b2\u07b0\3\2\2\2\u07b2\u07b3\3")
buf.write("\2\2\2\u07b3\u0137\3\2\2\2\u07b4\u07b2\3\2\2\2\u07b5\u07bb")
buf.write("\5\u013a\u009e\2\u07b6\u07bb\5\u013c\u009f\2\u07b7\u07bb")
buf.write("\5\u013e\u00a0\2\u07b8\u07bb\5\u0140\u00a1\2\u07b9\u07bb")
buf.write("\5\u0142\u00a2\2\u07ba\u07b5\3\2\2\2\u07ba\u07b6\3\2\2")
buf.write("\2\u07ba\u07b7\3\2\2\2\u07ba\u07b8\3\2\2\2\u07ba\u07b9")
buf.write("\3\2\2\2\u07bb\u0139\3\2\2\2\u07bc\u07bd\7\25\2\2\u07bd")
buf.write("\u07bf\5\u0144\u00a3\2\u07be\u07bc\3\2\2\2\u07bf\u07c2")
buf.write("\3\2\2\2\u07c0\u07be\3\2\2\2\u07c0\u07c1\3\2\2\2\u07c1")
buf.write("\u013b\3\2\2\2\u07c2\u07c0\3\2\2\2\u07c3\u07cc\7\25\2")
buf.write("\2\u07c4\u07c9\5\u0146\u00a4\2\u07c5\u07c6\7\25\2\2\u07c6")
buf.write("\u07c8\5\u0144\u00a3\2\u07c7\u07c5\3\2\2\2\u07c8\u07cb")
buf.write("\3\2\2\2\u07c9\u07c7\3\2\2\2\u07c9\u07ca\3\2\2\2\u07ca")
buf.write("\u07cd\3\2\2\2\u07cb\u07c9\3\2\2\2\u07cc\u07c4\3\2\2\2")
buf.write("\u07cc\u07cd\3\2\2\2\u07cd\u013d\3\2\2\2\u07ce\u07d3\5")
buf.write("\u0148\u00a5\2\u07cf\u07d0\7\25\2\2\u07d0\u07d2\5\u0144")
buf.write("\u00a3\2\u07d1\u07cf\3\2\2\2\u07d2\u07d5\3\2\2\2\u07d3")
buf.write("\u07d1\3\2\2\2\u07d3\u07d4\3\2\2\2\u07d4\u013f\3\2\2\2")
buf.write("\u07d5\u07d3\3\2\2\2\u07d6\u07db\5\u0146\u00a4\2\u07d7")
buf.write("\u07d8\7\25\2\2\u07d8\u07da\5\u0144\u00a3\2\u07d9\u07d7")
buf.write("\3\2\2\2\u07da\u07dd\3\2\2\2\u07db\u07d9\3\2\2\2\u07db")
buf.write("\u07dc\3\2\2\2\u07dc\u0141\3\2\2\2\u07dd\u07db\3\2\2\2")
buf.write("\u07de\u07df\3\2\2\2\u07df\u0143\3\2\2\2\u07e0\u07e2\5")
buf.write("\u014a\u00a6\2\u07e1\u07e0\3\2\2\2\u07e2\u07e5\3\2\2\2")
buf.write("\u07e3\u07e1\3\2\2\2\u07e3\u07e4\3\2\2\2\u07e4\u0145\3")
buf.write("\2\2\2\u07e5\u07e3\3\2\2\2\u07e6\u07e8\5\u014a\u00a6\2")
buf.write("\u07e7\u07e6\3\2\2\2\u07e8\u07e9\3\2\2\2\u07e9\u07e7\3")
buf.write("\2\2\2\u07e9\u07ea\3\2\2\2\u07ea\u0147\3\2\2\2\u07eb\u07f0")
buf.write("\5\u0152\u00aa\2\u07ec\u07f0\5\u0150\u00a9\2\u07ed\u07f0")
buf.write("\5\u0158\u00ad\2\u07ee\u07f0\7&\2\2\u07ef\u07eb\3\2\2")
buf.write("\2\u07ef\u07ec\3\2\2\2\u07ef\u07ed\3\2\2\2\u07ef\u07ee")
buf.write("\3\2\2\2\u07f0\u07f1\3\2\2\2\u07f1\u07ef\3\2\2\2\u07f1")
buf.write("\u07f2\3\2\2\2\u07f2\u0149\3\2\2\2\u07f3\u07f9\5\u0152")
buf.write("\u00aa\2\u07f4\u07f9\5\u0150\u00a9\2\u07f5\u07f9\5\u0158")
buf.write("\u00ad\2\u07f6\u07f9\7 \2\2\u07f7\u07f9\7&\2\2\u07f8\u07f3")
buf.write("\3\2\2\2\u07f8\u07f4\3\2\2\2\u07f8\u07f5\3\2\2\2\u07f8")
buf.write("\u07f6\3\2\2\2\u07f8\u07f7\3\2\2\2\u07f9\u014b\3\2\2\2")
buf.write("\u07fa\u07fe\5\u014a\u00a6\2\u07fb\u07fe\7\25\2\2\u07fc")
buf.write("\u07fe\7%\2\2\u07fd\u07fa\3\2\2\2\u07fd\u07fb\3\2\2\2")
buf.write("\u07fd\u07fc\3\2\2\2\u07fe\u0801\3\2\2\2\u07ff\u07fd\3")
buf.write("\2\2\2\u07ff\u0800\3\2\2\2\u0800\u014d\3\2\2\2\u0801\u07ff")
buf.write("\3\2\2\2\u0802\u0806\5\u014a\u00a6\2\u0803\u0806\7\25")
buf.write("\2\2\u0804\u0806\7%\2\2\u0805\u0802\3\2\2\2\u0805\u0803")
buf.write("\3\2\2\2\u0805\u0804\3\2\2\2\u0806\u0809\3\2\2\2\u0807")
buf.write("\u0805\3\2\2\2\u0807\u0808\3\2\2\2\u0808\u014f\3\2\2\2")
buf.write("\u0809\u0807\3\2\2\2\u080a\u080b\7\13\2\2\u080b\u080c")
buf.write("\5\u0102\u0082\2\u080c\u080d\5\u0102\u0082\2\u080d\u0151")
buf.write("\3\2\2\2\u080e\u0815\5\u00f2z\2\u080f\u0815\5\u00fe\u0080")
buf.write("\2\u0810\u0815\7\23\2\2\u0811\u0815\7\24\2\2\u0812\u0815")
buf.write("\7E\2\2\u0813\u0815\7d\2\2\u0814\u080e\3\2\2\2\u0814\u080f")
buf.write("\3\2\2\2\u0814\u0810\3\2\2\2\u0814\u0811\3\2\2\2\u0814")
buf.write("\u0812\3\2\2\2\u0814\u0813\3\2\2\2\u0815\u0153\3\2\2\2")
buf.write("\u0816\u0819\5\u0156\u00ac\2\u0817\u0819\5\u0158\u00ad")
buf.write("\2\u0818\u0816\3\2\2\2\u0818\u0817\3\2\2\2\u0819\u0155")
buf.write("\3\2\2\2\u081a\u081b\t(\2\2\u081b\u0157\3\2\2\2\u081c")
buf.write("\u081d\t)\2\2\u081d\u0159\3\2\2\2\u00f0\u0162\u016f\u018c")
buf.write("\u0193\u019c\u01a6\u01bb\u01c8\u01cf\u01d4\u01e0\u01e7")
buf.write("\u01ee\u01f4\u01fc\u0205\u020d\u0216\u0220\u0229\u0232")
buf.write("\u0237\u0242\u0248\u024e\u0255\u025a\u0264\u026b\u0272")
buf.write("\u0279\u027c\u0282\u0284\u0288\u028f\u0293\u0297\u02a5")
buf.write("\u02ab\u02af\u02b4\u02b7\u02d6\u02db\u02df\u02f2\u02f8")
buf.write("\u02ff\u030e\u0314\u031a\u0320\u0330\u033a\u033f\u0346")
buf.write("\u0348\u034b\u0350\u0355\u035a\u035f\u0361\u0372\u037a")
buf.write("\u0380\u0382\u0389\u0395\u0397\u03a0\u03a2\u03a6\u03a8")
buf.write("\u03b1\u03b6\u03be\u03c4\u03c9\u03d6\u03de\u03e7\u03ec")
buf.write("\u03ef\u03f3\u03f8\u03fc\u0400\u0405\u041c\u041f\u0424")
buf.write("\u0427\u042c\u0432\u0436\u043a\u043e\u044d\u0453\u0457")
buf.write("\u045a\u045e\u0463\u0467\u046b\u046f\u0474\u0477\u047c")
buf.write("\u047f\u0484\u0488\u048d\u0490\u0496\u049b\u049f\u04a4")
buf.write("\u04a8\u04ad\u04b2\u04b5\u04b8\u04c1\u04c8\u04ce\u04d0")
buf.write("\u04d5\u04da\u04de\u04e2\u04e9\u04f0\u04f2\u04f9\u04fd")
buf.write("\u0501\u0505\u050a\u0511\u0515\u0519\u051e\u0525\u0529")
buf.write("\u052d\u0532\u0535\u053c\u0544\u0549\u054e\u0554\u0558")
buf.write("\u0560\u0564\u0568\u056b\u0570\u0576\u0579\u057e\u0582")
buf.write("\u058b\u058f\u0596\u059a\u059f\u05a3\u05b0\u05bd\u05c7")
buf.write("\u05c9\u05d4\u05db\u05df\u05ea\u05ee\u05f5\u05fa\u05fe")
buf.write("\u0609\u0611\u0613\u0619\u061e\u0624\u0626\u062c\u0631")
buf.write("\u0637\u063f\u0645\u0647\u0672\u0688\u068b\u06a4\u06a6")
buf.write("\u06a9\u06c8\u06ca\u06cd\u06f4\u06f6\u06f9\u072c\u072e")
buf.write("\u0731\u0776\u0778\u077b\u077f\u078a\u078c\u0793\u07ab")
buf.write("\u07b0\u07b2\u07ba\u07c0\u07c9\u07cc\u07d3\u07db\u07e3")
buf.write("\u07e9\u07ef\u07f1\u07f8\u07fd\u07ff\u0805\u0807\u0814")
buf.write("\u0818")
return buf.getvalue()
class sdpParser ( Parser ):
grammarFileName = "sdp.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'\u0009'", "'\u000A'", "'\u000D'", "' '",
"'!'", "'\"'", "'#'", "'$'", "'%'", "'&'", "'''", "'('",
"')'", "'*'", "'+'", "','", "'-'", "'.'", "'/'", "'0'",
"'1'", "'2'", "'3'", "'4'", "'5'", "'6'", "'7'", "'8'",
"'9'", "':'", "';'", "'<'", "'='", "'>'", "'?'", "'@'",
"'A'", "'B'", "'C'", "'D'", "'E'", "'F'", "'G'", "'H'",
"'I'", "'J'", "'K'", "'L'", "'M'", "'N'", "'O'", "'P'",
"'Q'", "'R'", "'S'", "'T'", "'U'", "'V'", "'W'", "'X'",
"'Y'", "'Z'", "'['", "'\\'", "']'", "'^'", "'_'", "'`'",
"'a'", "'b'", "'c'", "'d'", "'e'", "'f'", "'g'", "'h'",
"'i'", "'j'", "'k'", "'l'", "'m'", "'n'", "'o'", "'p'",
"'q'", "'r'", "'s'", "'t'", "'u'", "'v'", "'w'", "'x'",
"'y'", "'z'", "'{'", "'|'", "'}'", "'~'", "'\u0000'",
"'\u0001'", "'\u0002'", "'\u0003'", "'\u0004'", "'\u0005'",
"'\u0006'", "'\u0007'", "'\u0008'", "'\u000B'", "'\u000C'",
"'\u000E'", "'\u000F'", "'\u0010'", "'\u0011'", "'\u0012'",
"'\u0013'", "'\u0014'", "'\u0015'", "'\u0016'", "'\u0017'",
"'\u0018'", "'\u0019'", "'\u001A'", "'\u001B'", "'\u001C'",
"'\u001D'", "'\u001E'", "'\u001F'", "'\u007F'", "'\u0080'",
"'\u0081'", "'\u0082'", "'\u0083'", "'\u0084'", "'\u0085'",
"'\u0086'", "'\u0087'", "'\u0088'", "'\u0089'", "'\u008A'",
"'\u008B'", "'\u008C'", "'\u008D'", "'\u008E'", "'\u008F'",
"'\u0090'", "'\u0091'", "'\u0092'", "'\u0093'", "'\u0094'",
"'\u0095'", "'\u0096'", "'\u0097'", "'\u0098'", "'\u0099'",
"'\u009A'", "'\u009B'", "'\u009C'", "'\u009D'", "'\u009E'",
"'\u009F'", "'\u00A0'", "'\u00A1'", "'\u00A2'", "'\u00A3'",
"'\u00A4'", "'\u00A5'", "'\u00A6'", "'\u00A7'", "'\u00A8'",
"'\u00A9'", "'\u00AA'", "'\u00AB'", "'\u00AC'", "'\u00AD'",
"'\u00AE'", "'\u00AF'", "'\u00B0'", "'\u00B1'", "'\u00B2'",
"'\u00B3'", "'\u00B4'", "'\u00B5'", "'\u00B6'", "'\u00B7'",
"'\u00B8'", "'\u00B9'", "'\u00BA'", "'\u00BB'", "'\u00BC'",
"'\u00BD'", "'\u00BE'", "'\u00BF'", "'\u00C0'", "'\u00C1'",
"'\u00C2'", "'\u00C3'", "'\u00C4'", "'\u00C5'", "'\u00C6'",
"'\u00C7'", "'\u00C8'", "'\u00C9'", "'\u00CA'", "'\u00CB'",
"'\u00CC'", "'\u00CD'", "'\u00CE'", "'\u00CF'", "'\u00D0'",
"'\u00D1'", "'\u00D2'", "'\u00D3'", "'\u00D4'", "'\u00D5'",
"'\u00D6'", "'\u00D7'", "'\u00D8'", "'\u00D9'", "'\u00DA'",
"'\u00DB'", "'\u00DC'", "'\u00DD'", "'\u00DE'", "'\u00DF'",
"'\u00E0'", "'\u00E1'", "'\u00E2'", "'\u00E3'", "'\u00E4'",
"'\u00E5'", "'\u00E6'", "'\u00E7'", "'\u00E8'", "'\u00E9'",
"'\u00EA'", "'\u00EB'", "'\u00EC'", "'\u00ED'", "'\u00EE'",
"'\u00EF'", "'\u00F0'", "'\u00F1'", "'\u00F2'", "'\u00F3'",
"'\u00F4'", "'\u00F5'", "'\u00F6'", "'\u00F7'", "'\u00F8'",
"'\u00F9'", "'\u00FA'", "'\u00FB'", "'\u00FC'", "'\u00FD'",
"'\u00FE'", "'\u00FF'" ]
symbolicNames = [ "<INVALID>", "TAB", "LF", "CR", "SPACE", "EXCLAMATION",
"QUOTE", "HASH", "DOLLAR", "PERCENT", "AMPERSAND",
"APOSTROPHE", "LEFT_PAREN", "RIGHT_PAREN", "ASTERISK",
"PLUS", "COMMA", "DASH", "PERIOD", "SLASH", "ZERO",
"ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN",
"EIGHT", "NINE", "COLON", "SEMICOLON", "LESS_THAN",
"EQUALS", "GREATER_THAN", "QUESTION", "AT", "CAP_A",
"CAP_B", "CAP_C", "CAP_D", "CAP_E", "CAP_F", "CAP_G",
"CAP_H", "CAP_I", "CAP_J", "CAP_K", "CAP_L", "CAP_M",
"CAP_N", "CAP_O", "CAP_P", "CAP_Q", "CAP_R", "CAP_S",
"CAP_T", "CAP_U", "CAP_V", "CAP_W", "CAP_X", "CAP_Y",
"CAP_Z", "LEFT_BRACE", "BACKSLASH", "RIGHT_BRACE",
"CARAT", "UNDERSCORE", "ACCENT", "A", "B", "C", "D",
"E", "F", "G", "H", "I", "J", "K", "L", "M", "N",
"O", "P", "Q", "R", "S", "T", "U", "V", "W", "X",
"Y", "Z", "LEFT_CURLY_BRACE", "PIPE", "RIGHT_CURLY_BRACE",
"TILDE", "U_0000", "U_0001", "U_0002", "U_0003", "U_0004",
"U_0005", "U_0006", "U_0007", "U_0008", "U_000B",
"U_000C", "U_000E", "U_000F", "U_0010", "U_0011",
"U_0012", "U_0013", "U_0014", "U_0015", "U_0016",
"U_0017", "U_0018", "U_0019", "U_001A", "U_001B",
"U_001C", "U_001D", "U_001E", "U_001F", "U_007F",
"U_0080", "U_0081", "U_0082", "U_0083", "U_0084",
"U_0085", "U_0086", "U_0087", "U_0088", "U_0089",
"U_008A", "U_008B", "U_008C", "U_008D", "U_008E",
"U_008F", "U_0090", "U_0091", "U_0092", "U_0093",
"U_0094", "U_0095", "U_0096", "U_0097", "U_0098",
"U_0099", "U_009A", "U_009B", "U_009C", "U_009D",
"U_009E", "U_009F", "U_00A0", "U_00A1", "U_00A2",
"U_00A3", "U_00A4", "U_00A5", "U_00A6", "U_00A7",
"U_00A8", "U_00A9", "U_00AA", "U_00AB", "U_00AC",
"U_00AD", "U_00AE", "U_00AF", "U_00B0", "U_00B1",
"U_00B2", "U_00B3", "U_00B4", "U_00B5", "U_00B6",
"U_00B7", "U_00B8", "U_00B9", "U_00BA", "U_00BB",
"U_00BC", "U_00BD", "U_00BE", "U_00BF", "U_00C0",
"U_00C1", "U_00C2", "U_00C3", "U_00C4", "U_00C5",
"U_00C6", "U_00C7", "U_00C8", "U_00C9", "U_00CA",
"U_00CB", "U_00CC", "U_00CD", "U_00CE", "U_00CF",
"U_00D0", "U_00D1", "U_00D2", "U_00D3", "U_00D4",
"U_00D5", "U_00D6", "U_00D7", "U_00D8", "U_00D9",
"U_00DA", "U_00DB", "U_00DC", "U_00DD", "U_00DE",
"U_00DF", "U_00E0", "U_00E1", "U_00E2", "U_00E3",
"U_00E4", "U_00E5", "U_00E6", "U_00E7", "U_00E8",
"U_00E9", "U_00EA", "U_00EB", "U_00EC", "U_00ED",
"U_00EE", "U_00EF", "U_00F0", "U_00F1", "U_00F2",
"U_00F3", "U_00F4", "U_00F5", "U_00F6", "U_00F7",
"U_00F8", "U_00F9", "U_00FA", "U_00FB", "U_00FC",
"U_00FD", "U_00FE", "U_00FF" ]
RULE_session_description = 0
RULE_proto_version = 1
RULE_origin_field = 2
RULE_session_name_field = 3
RULE_information_field = 4
RULE_uri_field = 5
RULE_email_fields = 6
RULE_phone_fields = 7
RULE_connection_field = 8
RULE_bandwidth_fields = 9
RULE_time_fields = 10
RULE_repeat_fields = 11
RULE_zone_adjustments = 12
RULE_key_field = 13
RULE_attribute_fields = 14
RULE_media_descriptions = 15
RULE_media_field = 16
RULE_username = 17
RULE_sess_id = 18
RULE_sess_version = 19
RULE_nettype = 20
RULE_addrtype = 21
RULE_uri = 22
RULE_email_address = 23
RULE_address_and_comment = 24
RULE_dispname_and_address = 25
RULE_phone_number = 26
RULE_phone = 27
RULE_connection_address = 28
RULE_bwtype = 29
RULE_bandwidth = 30
RULE_start_time = 31
RULE_stop_time = 32
RULE_time = 33
RULE_repeat_interval = 34
RULE_typed_time = 35
RULE_fixed_len_time_unit = 36
RULE_key_type = 37
RULE_base64 = 38
RULE_base64_unit = 39
RULE_base64_pad = 40
RULE_base64_char = 41
RULE_attribute = 42
RULE_att_field = 43
RULE_att_value = 44
RULE_media = 45
RULE_fmt = 46
RULE_proto = 47
RULE_port = 48
RULE_unicast_address = 49
RULE_multicast_address = 50
RULE_ip4_multicast = 51
RULE_m1 = 52
RULE_ip6_multicast = 53
RULE_ttl = 54
RULE_fqdn = 55
RULE_ip4_address = 56
RULE_b1 = 57
RULE_ip6_address = 58
RULE_hexpart = 59
RULE_hexseq = 60
RULE_hex4 = 61
RULE_extn_addr = 62
RULE_text = 63
RULE_byte_string = 64
RULE_non_ws_string = 65
RULE_token_char = 66
RULE_token = 67
RULE_email_safe = 68
RULE_integer = 69
RULE_alpha_numeric = 70
RULE_pos_digit = 71
RULE_decimal_uchar = 72
RULE_addr_spec = 73
RULE_local_part = 74
RULE_domain = 75
RULE_domain_literal = 76
RULE_dtext = 77
RULE_atext = 78
RULE_atom = 79
RULE_dot_atom_text = 80
RULE_dot_atom = 81
RULE_specials = 82
RULE_qtext = 83
RULE_qcontent = 84
RULE_quoted_string = 85
RULE_word = 86
RULE_phrase = 87
RULE_quoted_pair = 88
RULE_fws = 89
RULE_ctext = 90
RULE_ccontent = 91
RULE_comment = 92
RULE_cfws = 93
RULE_obs_ctext = 94
RULE_obs_qtext = 95
RULE_obs_utext = 96
RULE_obs_qp = 97
RULE_obs_phrase = 98
RULE_obs_phrase_list = 99
RULE_obs_angle_addr = 100
RULE_obs_route = 101
RULE_obs_domain_list = 102
RULE_obs_mbox_list = 103
RULE_obs_addr_list = 104
RULE_obs_group_list = 105
RULE_obs_local_part = 106
RULE_obs_domain = 107
RULE_obs_dtext = 108
RULE_obs_fws = 109
RULE_obs_no_ws_ctl = 110
RULE_address = 111
RULE_mailbox = 112
RULE_name_addr = 113
RULE_angle_addr = 114
RULE_group = 115
RULE_display_name = 116
RULE_mailbox_list = 117
RULE_address_list = 118
RULE_group_list = 119
RULE_alpha = 120
RULE_bit = 121
RULE_char_1 = 122
RULE_cr = 123
RULE_crlf = 124
RULE_ctl = 125
RULE_digit = 126
RULE_dquote = 127
RULE_hexdig = 128
RULE_htab = 129
RULE_lf = 130
RULE_lwsp = 131
RULE_octet = 132
RULE_sp = 133
RULE_vchar = 134
RULE_wsp = 135
RULE_xxuri = 136
RULE_hier_part = 137
RULE_uri_reference = 138
RULE_absolute_uri = 139
RULE_relative_ref = 140
RULE_relative_part = 141
RULE_scheme = 142
RULE_authority = 143
RULE_userinfo = 144
RULE_host = 145
RULE_xport = 146
RULE_ip_literal = 147
RULE_ipvfuture = 148
RULE_ipv6address = 149
RULE_h16 = 150
RULE_ls32 = 151
RULE_ipv4address = 152
RULE_dec_octet = 153
RULE_reg_name = 154
RULE_path = 155
RULE_path_abempty = 156
RULE_path_absolute = 157
RULE_path_noscheme = 158
RULE_path_rootless = 159
RULE_path_empty = 160
RULE_segment = 161
RULE_segment_nz = 162
RULE_segment_nz_nc = 163
RULE_pchar = 164
RULE_query = 165
RULE_fragment_1 = 166
RULE_pct_encoded = 167
RULE_unreserved = 168
RULE_reserved = 169
RULE_gen_delims = 170
RULE_sub_delims = 171
ruleNames = [ "session_description", "proto_version", "origin_field",
"session_name_field", "information_field", "uri_field",
"email_fields", "phone_fields", "connection_field", "bandwidth_fields",
"time_fields", "repeat_fields", "zone_adjustments", "key_field",
"attribute_fields", "media_descriptions", "media_field",
"username", "sess_id", "sess_version", "nettype", "addrtype",
"uri", "email_address", "address_and_comment", "dispname_and_address",
"phone_number", "phone", "connection_address", "bwtype",
"bandwidth", "start_time", "stop_time", "time", "repeat_interval",
"typed_time", "fixed_len_time_unit", "key_type", "base64",
"base64_unit", "base64_pad", "base64_char", "attribute",
"att_field", "att_value", "media", "fmt", "proto", "port",
"unicast_address", "multicast_address", "ip4_multicast",
"m1", "ip6_multicast", "ttl", "fqdn", "ip4_address",
"b1", "ip6_address", "hexpart", "hexseq", "hex4", "extn_addr",
"text", "byte_string", "non_ws_string", "token_char",
"token", "email_safe", "integer", "alpha_numeric", "pos_digit",
"decimal_uchar", "addr_spec", "local_part", "domain",
"domain_literal", "dtext", "atext", "atom", "dot_atom_text",
"dot_atom", "specials", "qtext", "qcontent", "quoted_string",
"word", "phrase", "quoted_pair", "fws", "ctext", "ccontent",
"comment", "cfws", "obs_ctext", "obs_qtext", "obs_utext",
"obs_qp", "obs_phrase", "obs_phrase_list", "obs_angle_addr",
"obs_route", "obs_domain_list", "obs_mbox_list", "obs_addr_list",
"obs_group_list", "obs_local_part", "obs_domain", "obs_dtext",
"obs_fws", "obs_no_ws_ctl", "address", "mailbox", "name_addr",
"angle_addr", "group", "display_name", "mailbox_list",
"address_list", "group_list", "alpha", "bit", "char_1",
"cr", "crlf", "ctl", "digit", "dquote", "hexdig", "htab",
"lf", "lwsp", "octet", "sp", "vchar", "wsp", "xxuri",
"hier_part", "uri_reference", "absolute_uri", "relative_ref",
"relative_part", "scheme", "authority", "userinfo", "host",
"xport", "ip_literal", "ipvfuture", "ipv6address", "h16",
"ls32", "ipv4address", "dec_octet", "reg_name", "path",
"path_abempty", "path_absolute", "path_noscheme", "path_rootless",
"path_empty", "segment", "segment_nz", "segment_nz_nc",
"pchar", "query", "fragment_1", "pct_encoded", "unreserved",
"reserved", "gen_delims", "sub_delims" ]
EOF = Token.EOF
TAB=1
LF=2
CR=3
SPACE=4
EXCLAMATION=5
QUOTE=6
HASH=7
DOLLAR=8
PERCENT=9
AMPERSAND=10
APOSTROPHE=11
LEFT_PAREN=12
RIGHT_PAREN=13
ASTERISK=14
PLUS=15
COMMA=16
DASH=17
PERIOD=18
SLASH=19
ZERO=20
ONE=21
TWO=22
THREE=23
FOUR=24
FIVE=25
SIX=26
SEVEN=27
EIGHT=28
NINE=29
COLON=30
SEMICOLON=31
LESS_THAN=32
EQUALS=33
GREATER_THAN=34
QUESTION=35
AT=36
CAP_A=37
CAP_B=38
CAP_C=39
CAP_D=40
CAP_E=41
CAP_F=42
CAP_G=43
CAP_H=44
CAP_I=45
CAP_J=46
CAP_K=47
CAP_L=48
CAP_M=49
CAP_N=50
CAP_O=51
CAP_P=52
CAP_Q=53
CAP_R=54
CAP_S=55
CAP_T=56
CAP_U=57
CAP_V=58
CAP_W=59
CAP_X=60
CAP_Y=61
CAP_Z=62
LEFT_BRACE=63
BACKSLASH=64
RIGHT_BRACE=65
CARAT=66
UNDERSCORE=67
ACCENT=68
A=69
B=70
C=71
D=72
E=73
F=74
G=75
H=76
I=77
J=78
K=79
L=80
M=81
N=82
O=83
P=84
Q=85
R=86
S=87
T=88
U=89
V=90
W=91
X=92
Y=93
Z=94
LEFT_CURLY_BRACE=95
PIPE=96
RIGHT_CURLY_BRACE=97
TILDE=98
U_0000=99
U_0001=100
U_0002=101
U_0003=102
U_0004=103
U_0005=104
U_0006=105
U_0007=106
U_0008=107
U_000B=108
U_000C=109
U_000E=110
U_000F=111
U_0010=112
U_0011=113
U_0012=114
U_0013=115
U_0014=116
U_0015=117
U_0016=118
U_0017=119
U_0018=120
U_0019=121
U_001A=122
U_001B=123
U_001C=124
U_001D=125
U_001E=126
U_001F=127
U_007F=128
U_0080=129
U_0081=130
U_0082=131
U_0083=132
U_0084=133
U_0085=134
U_0086=135
U_0087=136
U_0088=137
U_0089=138
U_008A=139
U_008B=140
U_008C=141
U_008D=142
U_008E=143
U_008F=144
U_0090=145
U_0091=146
U_0092=147
U_0093=148
U_0094=149
U_0095=150
U_0096=151
U_0097=152
U_0098=153
U_0099=154
U_009A=155
U_009B=156
U_009C=157
U_009D=158
U_009E=159
U_009F=160
U_00A0=161
U_00A1=162
U_00A2=163
U_00A3=164
U_00A4=165
U_00A5=166
U_00A6=167
U_00A7=168
U_00A8=169
U_00A9=170
U_00AA=171
U_00AB=172
U_00AC=173
U_00AD=174
U_00AE=175
U_00AF=176
U_00B0=177
U_00B1=178
U_00B2=179
U_00B3=180
U_00B4=181
U_00B5=182
U_00B6=183
U_00B7=184
U_00B8=185
U_00B9=186
U_00BA=187
U_00BB=188
U_00BC=189
U_00BD=190
U_00BE=191
U_00BF=192
U_00C0=193
U_00C1=194
U_00C2=195
U_00C3=196
U_00C4=197
U_00C5=198
U_00C6=199
U_00C7=200
U_00C8=201
U_00C9=202
U_00CA=203
U_00CB=204
U_00CC=205
U_00CD=206
U_00CE=207
U_00CF=208
U_00D0=209
U_00D1=210
U_00D2=211
U_00D3=212
U_00D4=213
U_00D5=214
U_00D6=215
U_00D7=216
U_00D8=217
U_00D9=218
U_00DA=219
U_00DB=220
U_00DC=221
U_00DD=222
U_00DE=223
U_00DF=224
U_00E0=225
U_00E1=226
U_00E2=227
U_00E3=228
U_00E4=229
U_00E5=230
U_00E6=231
U_00E7=232
U_00E8=233
U_00E9=234
U_00EA=235
U_00EB=236
U_00EC=237
U_00ED=238
U_00EE=239
U_00EF=240
U_00F0=241
U_00F1=242
U_00F2=243
U_00F3=244
U_00F4=245
U_00F5=246
U_00F6=247
U_00F7=248
U_00F8=249
U_00F9=250
U_00FA=251
U_00FB=252
U_00FC=253
U_00FD=254
U_00FE=255
U_00FF=256
# Construct the SDP parser over a token stream.
# Generated by ANTLR (grammarFileName "sdp.g4"); do not hand-edit logic.
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
# Generated against ANTLR runtime 4.8; a mismatched runtime is rejected here.
self.checkVersion("4.8")
# Adaptive LL(*) simulator; DFA cache and prediction-context cache are
# class-level, so they are shared across all parser instances.
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
# Parse-tree node for the top-level session_description rule.
# Accessor methods return the matched sub-rule contexts (or None if absent).
class Session_descriptionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def proto_version(self):
return self.getTypedRuleContext(sdpParser.Proto_versionContext,0)
def origin_field(self):
return self.getTypedRuleContext(sdpParser.Origin_fieldContext,0)
def session_name_field(self):
return self.getTypedRuleContext(sdpParser.Session_name_fieldContext,0)
def information_field(self):
return self.getTypedRuleContext(sdpParser.Information_fieldContext,0)
def uri_field(self):
return self.getTypedRuleContext(sdpParser.Uri_fieldContext,0)
def email_fields(self):
return self.getTypedRuleContext(sdpParser.Email_fieldsContext,0)
def phone_fields(self):
return self.getTypedRuleContext(sdpParser.Phone_fieldsContext,0)
def bandwidth_fields(self):
return self.getTypedRuleContext(sdpParser.Bandwidth_fieldsContext,0)
def time_fields(self):
return self.getTypedRuleContext(sdpParser.Time_fieldsContext,0)
def key_field(self):
return self.getTypedRuleContext(sdpParser.Key_fieldContext,0)
def attribute_fields(self):
return self.getTypedRuleContext(sdpParser.Attribute_fieldsContext,0)
def media_descriptions(self):
return self.getTypedRuleContext(sdpParser.Media_descriptionsContext,0)
def connection_field(self):
return self.getTypedRuleContext(sdpParser.Connection_fieldContext,0)
def getRuleIndex(self):
return sdpParser.RULE_session_description
# enter/exit hooks dispatch to the listener only if it defines the method.
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSession_description" ):
listener.enterSession_description(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSession_description" ):
listener.exitSession_description(self)
# Entry rule: parses a whole SDP session description in field order
# (v=, o=, s=, i=, u=, e=, p=, optional c=, b=, t=, k=, a=, media).
# `self.state = N` assignments are ATN state numbers from the serialized ATN;
# they must not be changed by hand.
def session_description(self):
localctx = sdpParser.Session_descriptionContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_session_description)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 344
self.proto_version()
self.state = 345
self.origin_field()
self.state = 346
self.session_name_field()
self.state = 347
self.information_field()
self.state = 348
self.uri_field()
self.state = 349
self.email_fields()
self.state = 350
self.phone_fields()
self.state = 352
self._errHandler.sync(self)
_la = self._input.LA(1)
# Optional c= line: taken only when the lookahead token is 'c'.
if _la==sdpParser.C:
self.state = 351
self.connection_field()
self.state = 354
self.bandwidth_fields()
self.state = 355
self.time_fields()
self.state = 356
self.key_field()
self.state = 357
self.attribute_fields()
self.state = 358
self.media_descriptions()
# Standard generated recovery: record the error on the context and resync.
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the proto_version rule ("v=" line).
class Proto_versionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def V(self):
return self.getToken(sdpParser.V, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
# With no index, returns all digit sub-contexts; with an index, just one.
def digit(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.DigitContext)
else:
return self.getTypedRuleContext(sdpParser.DigitContext,i)
def getRuleIndex(self):
return sdpParser.RULE_proto_version
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterProto_version" ):
listener.enterProto_version(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitProto_version" ):
listener.exitProto_version(self)
# Parses the version line: 'v' '=' digit+ crlf.
def proto_version(self):
localctx = sdpParser.Proto_versionContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_proto_version)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 360
self.match(sdpParser.V)
self.state = 361
self.match(sdpParser.EQUALS)
self.state = 363
self._errHandler.sync(self)
_la = self._input.LA(1)
# (digit)+ loop: repeat while the lookahead is one of ZERO..NINE.
while True:
self.state = 362
self.digit()
self.state = 365
self._errHandler.sync(self)
_la = self._input.LA(1)
# Bitset membership test over token types 0..63 (digit tokens are 20-29).
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
break
self.state = 367
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the origin_field rule ("o=" line).
class Origin_fieldContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def O(self):
return self.getToken(sdpParser.O, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def username(self):
return self.getTypedRuleContext(sdpParser.UsernameContext,0)
# With no index, returns all sp (space) sub-contexts; with an index, just one.
def sp(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.SpContext)
else:
return self.getTypedRuleContext(sdpParser.SpContext,i)
def sess_id(self):
return self.getTypedRuleContext(sdpParser.Sess_idContext,0)
def sess_version(self):
return self.getTypedRuleContext(sdpParser.Sess_versionContext,0)
def nettype(self):
return self.getTypedRuleContext(sdpParser.NettypeContext,0)
def addrtype(self):
return self.getTypedRuleContext(sdpParser.AddrtypeContext,0)
def unicast_address(self):
return self.getTypedRuleContext(sdpParser.Unicast_addressContext,0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
def getRuleIndex(self):
return sdpParser.RULE_origin_field
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterOrigin_field" ):
listener.enterOrigin_field(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitOrigin_field" ):
listener.exitOrigin_field(self)
# Parses the origin line:
# 'o' '=' username sp sess_id sp sess_version sp nettype sp addrtype
# sp unicast_address crlf.
def origin_field(self):
localctx = sdpParser.Origin_fieldContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_origin_field)
try:
self.enterOuterAlt(localctx, 1)
self.state = 369
self.match(sdpParser.O)
self.state = 370
self.match(sdpParser.EQUALS)
self.state = 371
self.username()
self.state = 372
self.sp()
self.state = 373
self.sess_id()
self.state = 374
self.sp()
self.state = 375
self.sess_version()
self.state = 376
self.sp()
self.state = 377
self.nettype()
self.state = 378
self.sp()
self.state = 379
self.addrtype()
self.state = 380
self.sp()
self.state = 381
self.unicast_address()
self.state = 382
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the session_name_field rule ("s=" line).
class Session_name_fieldContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def S(self):
return self.getToken(sdpParser.S, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def text(self):
return self.getTypedRuleContext(sdpParser.TextContext,0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
def getRuleIndex(self):
return sdpParser.RULE_session_name_field
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSession_name_field" ):
listener.enterSession_name_field(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSession_name_field" ):
listener.exitSession_name_field(self)
# Parses the mandatory session-name line: 's' '=' text crlf.
def session_name_field(self):
localctx = sdpParser.Session_name_fieldContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_session_name_field)
try:
self.enterOuterAlt(localctx, 1)
self.state = 384
self.match(sdpParser.S)
self.state = 385
self.match(sdpParser.EQUALS)
self.state = 386
self.text()
self.state = 387
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the information_field rule (optional "i=" line).
class Information_fieldContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def I(self):
return self.getToken(sdpParser.I, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def text(self):
return self.getTypedRuleContext(sdpParser.TextContext,0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
def getRuleIndex(self):
return sdpParser.RULE_information_field
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInformation_field" ):
listener.enterInformation_field(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInformation_field" ):
listener.exitInformation_field(self)
# Parses an optional information line: ('i' '=' text crlf)?.
# Matches nothing (empty context) unless the lookahead is 'i'.
def information_field(self):
localctx = sdpParser.Information_fieldContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_information_field)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 394
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==sdpParser.I:
self.state = 389
self.match(sdpParser.I)
self.state = 390
self.match(sdpParser.EQUALS)
self.state = 391
self.text()
self.state = 392
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the uri_field rule (optional "u=" line).
class Uri_fieldContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def U(self):
return self.getToken(sdpParser.U, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def uri(self):
return self.getTypedRuleContext(sdpParser.UriContext,0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
def getRuleIndex(self):
return sdpParser.RULE_uri_field
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUri_field" ):
listener.enterUri_field(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUri_field" ):
listener.exitUri_field(self)
# Parses an optional URI line: ('u' '=' uri crlf)?.
# Matches nothing (empty context) unless the lookahead is 'u'.
def uri_field(self):
localctx = sdpParser.Uri_fieldContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_uri_field)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 401
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==sdpParser.U:
self.state = 396
self.match(sdpParser.U)
self.state = 397
self.match(sdpParser.EQUALS)
self.state = 398
self.uri()
self.state = 399
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the email_fields rule (zero or more "e=" lines).
# Indexed accessors follow the ANTLR convention: no index -> list of all
# occurrences, index i -> the i-th occurrence.
class Email_fieldsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def E(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.E)
else:
return self.getToken(sdpParser.E, i)
def EQUALS(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.EQUALS)
else:
return self.getToken(sdpParser.EQUALS, i)
def email_address(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.Email_addressContext)
else:
return self.getTypedRuleContext(sdpParser.Email_addressContext,i)
def crlf(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.CrlfContext)
else:
return self.getTypedRuleContext(sdpParser.CrlfContext,i)
def getRuleIndex(self):
return sdpParser.RULE_email_fields
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterEmail_fields" ):
listener.enterEmail_fields(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitEmail_fields" ):
listener.exitEmail_fields(self)
# Parses: ('e' '=' email_address crlf)* — loops while the lookahead is 'e'.
def email_fields(self):
localctx = sdpParser.Email_fieldsContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_email_fields)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 410
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.E:
self.state = 403
self.match(sdpParser.E)
self.state = 404
self.match(sdpParser.EQUALS)
self.state = 405
self.email_address()
self.state = 406
self.crlf()
self.state = 412
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the phone_fields rule (zero or more "p=" lines).
# Indexed accessors: no index -> list of all occurrences, index i -> i-th.
class Phone_fieldsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def P(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.P)
else:
return self.getToken(sdpParser.P, i)
def EQUALS(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.EQUALS)
else:
return self.getToken(sdpParser.EQUALS, i)
def phone_number(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.Phone_numberContext)
else:
return self.getTypedRuleContext(sdpParser.Phone_numberContext,i)
def crlf(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.CrlfContext)
else:
return self.getTypedRuleContext(sdpParser.CrlfContext,i)
def getRuleIndex(self):
return sdpParser.RULE_phone_fields
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPhone_fields" ):
listener.enterPhone_fields(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPhone_fields" ):
listener.exitPhone_fields(self)
# Parses: ('p' '=' phone_number crlf)* — loops while the lookahead is 'p'.
def phone_fields(self):
localctx = sdpParser.Phone_fieldsContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_phone_fields)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 420
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.P:
self.state = 413
self.match(sdpParser.P)
self.state = 414
self.match(sdpParser.EQUALS)
self.state = 415
self.phone_number()
self.state = 416
self.crlf()
self.state = 422
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the connection_field rule ("c=" line).
class Connection_fieldContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def C(self):
return self.getToken(sdpParser.C, 0)
def EQUALS(self):
return self.getToken(sdpParser.EQUALS, 0)
def nettype(self):
return self.getTypedRuleContext(sdpParser.NettypeContext,0)
# With no index, returns all sp (space) sub-contexts; with an index, one.
def sp(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.SpContext)
else:
return self.getTypedRuleContext(sdpParser.SpContext,i)
def addrtype(self):
return self.getTypedRuleContext(sdpParser.AddrtypeContext,0)
def connection_address(self):
return self.getTypedRuleContext(sdpParser.Connection_addressContext,0)
def crlf(self):
return self.getTypedRuleContext(sdpParser.CrlfContext,0)
def getRuleIndex(self):
return sdpParser.RULE_connection_field
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterConnection_field" ):
listener.enterConnection_field(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitConnection_field" ):
listener.exitConnection_field(self)
# Parses: 'c' '=' nettype sp addrtype sp connection_address crlf.
def connection_field(self):
localctx = sdpParser.Connection_fieldContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_connection_field)
try:
self.enterOuterAlt(localctx, 1)
self.state = 423
self.match(sdpParser.C)
self.state = 424
self.match(sdpParser.EQUALS)
self.state = 425
self.nettype()
self.state = 426
self.sp()
self.state = 427
self.addrtype()
self.state = 428
self.sp()
self.state = 429
self.connection_address()
self.state = 430
self.crlf()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
# Parse-tree node for the bandwidth_fields rule (zero or more "b=" lines).
# Indexed accessors: no index -> list of all occurrences, index i -> i-th.
class Bandwidth_fieldsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def B(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.B)
else:
return self.getToken(sdpParser.B, i)
def EQUALS(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.EQUALS)
else:
return self.getToken(sdpParser.EQUALS, i)
def bwtype(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.BwtypeContext)
else:
return self.getTypedRuleContext(sdpParser.BwtypeContext,i)
def COLON(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.COLON)
else:
return self.getToken(sdpParser.COLON, i)
def bandwidth(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.BandwidthContext)
else:
return self.getTypedRuleContext(sdpParser.BandwidthContext,i)
def crlf(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.CrlfContext)
else:
return self.getTypedRuleContext(sdpParser.CrlfContext,i)
def getRuleIndex(self):
return sdpParser.RULE_bandwidth_fields
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBandwidth_fields" ):
listener.enterBandwidth_fields(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBandwidth_fields" ):
listener.exitBandwidth_fields(self)
# Parses: ('b' '=' bwtype ':' bandwidth crlf)* — loops while lookahead is 'b'.
def bandwidth_fields(self):
localctx = sdpParser.Bandwidth_fieldsContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_bandwidth_fields)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 441
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.B:
self.state = 432
self.match(sdpParser.B)
self.state = 433
self.match(sdpParser.EQUALS)
self.state = 434
self.bwtype()
self.state = 435
self.match(sdpParser.COLON)
self.state = 436
self.bandwidth()
self.state = 437
self.crlf()
self.state = 443
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Time_fieldsContext(ParserRuleContext):
    """Parse-tree node for the ``time_fields`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def T(self, i:int=None):
        return self.getTokens(sdpParser.T) if i is None else self.getToken(sdpParser.T, i)

    def EQUALS(self, i:int=None):
        return self.getTokens(sdpParser.EQUALS) if i is None else self.getToken(sdpParser.EQUALS, i)

    def start_time(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Start_timeContext)
        return self.getTypedRuleContext(sdpParser.Start_timeContext,i)

    def sp(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.SpContext)
        return self.getTypedRuleContext(sdpParser.SpContext,i)

    def stop_time(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Stop_timeContext)
        return self.getTypedRuleContext(sdpParser.Stop_timeContext,i)

    def crlf(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CrlfContext)
        return self.getTypedRuleContext(sdpParser.CrlfContext,i)

    def zone_adjustments(self):
        return self.getTypedRuleContext(sdpParser.Zone_adjustmentsContext,0)

    def repeat_fields(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Repeat_fieldsContext)
        return self.getTypedRuleContext(sdpParser.Repeat_fieldsContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_time_fields

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterTime_fields"):
            return
        listener.enterTime_fields(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitTime_fields"):
            return
        listener.exitTime_fields(self)
def time_fields(self):
    """Parse the ``time_fields`` rule: one or more ``t=<start> <stop>`` lines,
    each optionally followed by repeat lines, then an optional zone block.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Time_fieldsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_time_fields)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 459
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( t = start_time sp stop_time ... crlf )+ — at least one 't' line
        while True:
            self.state = 444
            self.match(sdpParser.T)
            self.state = 445
            self.match(sdpParser.EQUALS)
            self.state = 446
            self.start_time()
            self.state = 447
            self.sp()
            self.state = 448
            self.stop_time()
            self.state = 454
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
            # ( crlf repeat_fields )* — adaptive prediction decides whether a
            # following crlf starts a repeat line or ends this time field
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 449
                    self.crlf()
                    self.state = 450
                    self.repeat_fields()
                self.state = 456
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
            self.state = 457
            self.crlf()
            self.state = 461
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (_la==sdpParser.T):
                break
        self.state = 466
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( zone_adjustments crlf )? — optional 'z=' block
        if _la==sdpParser.Z:
            self.state = 463
            self.zone_adjustments()
            self.state = 464
            self.crlf()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Repeat_fieldsContext(ParserRuleContext):
    """Parse-tree node for the ``repeat_fields`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def R(self):
        return self.getToken(sdpParser.R, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def repeat_interval(self):
        return self.getTypedRuleContext(sdpParser.Repeat_intervalContext,0)

    def sp(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.SpContext)
        return self.getTypedRuleContext(sdpParser.SpContext,i)

    def typed_time(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Typed_timeContext)
        return self.getTypedRuleContext(sdpParser.Typed_timeContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_repeat_fields

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterRepeat_fields"):
            return
        listener.enterRepeat_fields(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitRepeat_fields"):
            return
        listener.exitRepeat_fields(self)
def repeat_fields(self):
    """Parse the ``repeat_fields`` rule: ``r=<interval> <duration> <offset>+``.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Repeat_fieldsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_repeat_fields)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 468
        self.match(sdpParser.R)
        self.state = 469
        self.match(sdpParser.EQUALS)
        self.state = 470
        self.repeat_interval()
        self.state = 471
        self.sp()
        self.state = 472
        self.typed_time()
        self.state = 476
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( sp typed_time )+ — at least one space-separated offset
        while True:
            self.state = 473
            self.sp()
            self.state = 474
            self.typed_time()
            self.state = 478
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (_la==sdpParser.SPACE):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Zone_adjustmentsContext(ParserRuleContext):
    """Parse-tree node for the ``zone_adjustments`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def Z(self):
        return self.getToken(sdpParser.Z, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def time(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.TimeContext)
        return self.getTypedRuleContext(sdpParser.TimeContext,i)

    def sp(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.SpContext)
        return self.getTypedRuleContext(sdpParser.SpContext,i)

    def typed_time(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Typed_timeContext)
        return self.getTypedRuleContext(sdpParser.Typed_timeContext,i)

    def DASH(self, i:int=None):
        return self.getTokens(sdpParser.DASH) if i is None else self.getToken(sdpParser.DASH, i)

    def getRuleIndex(self):
        return sdpParser.RULE_zone_adjustments

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterZone_adjustments"):
            return
        listener.enterZone_adjustments(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitZone_adjustments"):
            return
        listener.exitZone_adjustments(self)
def zone_adjustments(self):
    """Parse the ``zone_adjustments`` rule: ``z=<time> <[-]offset>`` pairs.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Zone_adjustmentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_zone_adjustments)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 480
        self.match(sdpParser.Z)
        self.state = 481
        self.match(sdpParser.EQUALS)
        self.state = 482
        self.time()
        self.state = 483
        self.sp()
        self.state = 485
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # optional leading '-' before the first typed_time
        if _la==sdpParser.DASH:
            self.state = 484
            self.match(sdpParser.DASH)
        self.state = 487
        self.typed_time()
        self.state = 498
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( sp time sp [-] typed_time )* — additional adjustment pairs
        while _la==sdpParser.SPACE:
            self.state = 488
            self.sp()
            self.state = 489
            self.time()
            self.state = 490
            self.sp()
            self.state = 492
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==sdpParser.DASH:
                self.state = 491
                self.match(sdpParser.DASH)
            self.state = 494
            self.typed_time()
            self.state = 500
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Key_fieldContext(ParserRuleContext):
    """Parse-tree node for the ``key_field`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def K(self):
        return self.getToken(sdpParser.K, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def key_type(self):
        return self.getTypedRuleContext(sdpParser.Key_typeContext,0)

    def crlf(self):
        return self.getTypedRuleContext(sdpParser.CrlfContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_key_field

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterKey_field"):
            return
        listener.enterKey_field(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitKey_field"):
            return
        listener.exitKey_field(self)
def key_field(self):
    """Parse the ``key_field`` rule: an optional ``k=<key_type>`` line.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Key_fieldContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_key_field)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 506
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( k = key_type crlf )? — entire field is optional
        if _la==sdpParser.K:
            self.state = 501
            self.match(sdpParser.K)
            self.state = 502
            self.match(sdpParser.EQUALS)
            self.state = 503
            self.key_type()
            self.state = 504
            self.crlf()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Attribute_fieldsContext(ParserRuleContext):
    """Parse-tree node for the ``attribute_fields`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def A(self, i:int=None):
        return self.getTokens(sdpParser.A) if i is None else self.getToken(sdpParser.A, i)

    def EQUALS(self, i:int=None):
        return self.getTokens(sdpParser.EQUALS) if i is None else self.getToken(sdpParser.EQUALS, i)

    def attribute(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AttributeContext)
        return self.getTypedRuleContext(sdpParser.AttributeContext,i)

    def crlf(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CrlfContext)
        return self.getTypedRuleContext(sdpParser.CrlfContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_attribute_fields

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterAttribute_fields"):
            return
        listener.enterAttribute_fields(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitAttribute_fields"):
            return
        listener.exitAttribute_fields(self)
def attribute_fields(self):
    """Parse the ``attribute_fields`` rule: zero or more ``a=<attribute>`` lines.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Attribute_fieldsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_attribute_fields)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 515
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( a = attribute crlf )* — loop while the next token is 'a'
        while _la==sdpParser.A:
            self.state = 508
            self.match(sdpParser.A)
            self.state = 509
            self.match(sdpParser.EQUALS)
            self.state = 510
            self.attribute()
            self.state = 511
            self.crlf()
            self.state = 517
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Media_descriptionsContext(ParserRuleContext):
    """Parse-tree node for the ``media_descriptions`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def media_field(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Media_fieldContext)
        return self.getTypedRuleContext(sdpParser.Media_fieldContext,i)

    def information_field(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Information_fieldContext)
        return self.getTypedRuleContext(sdpParser.Information_fieldContext,i)

    def bandwidth_fields(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Bandwidth_fieldsContext)
        return self.getTypedRuleContext(sdpParser.Bandwidth_fieldsContext,i)

    def key_field(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Key_fieldContext)
        return self.getTypedRuleContext(sdpParser.Key_fieldContext,i)

    def attribute_fields(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Attribute_fieldsContext)
        return self.getTypedRuleContext(sdpParser.Attribute_fieldsContext,i)

    def connection_field(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Connection_fieldContext)
        return self.getTypedRuleContext(sdpParser.Connection_fieldContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_media_descriptions

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterMedia_descriptions"):
            return
        listener.enterMedia_descriptions(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitMedia_descriptions"):
            return
        listener.exitMedia_descriptions(self)
def media_descriptions(self):
    """Parse the ``media_descriptions`` rule: zero or more ``m=`` sections,
    each with its information, connection, bandwidth, key, and attribute parts.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Media_descriptionsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_media_descriptions)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 532
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # outer loop: one iteration per media section, while lookahead is 'm'
        while _la==sdpParser.M:
            self.state = 518
            self.media_field()
            self.state = 519
            self.information_field()
            self.state = 523
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # ( connection_field )* — loop while lookahead is 'c'
            while _la==sdpParser.C:
                self.state = 520
                self.connection_field()
                self.state = 525
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 526
            self.bandwidth_fields()
            self.state = 527
            self.key_field()
            self.state = 528
            self.attribute_fields()
            self.state = 534
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Media_fieldContext(ParserRuleContext):
    """Parse-tree node for the ``media_field`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def M(self):
        return self.getToken(sdpParser.M, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def media(self):
        return self.getTypedRuleContext(sdpParser.MediaContext,0)

    def sp(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.SpContext)
        return self.getTypedRuleContext(sdpParser.SpContext,i)

    def port(self):
        return self.getTypedRuleContext(sdpParser.PortContext,0)

    def proto(self):
        return self.getTypedRuleContext(sdpParser.ProtoContext,0)

    def crlf(self):
        return self.getTypedRuleContext(sdpParser.CrlfContext,0)

    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)

    def integer(self):
        return self.getTypedRuleContext(sdpParser.IntegerContext,0)

    def fmt(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.FmtContext)
        return self.getTypedRuleContext(sdpParser.FmtContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_media_field

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterMedia_field"):
            return
        listener.enterMedia_field(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitMedia_field"):
            return
        listener.exitMedia_field(self)
def media_field(self):
    """Parse the ``media_field`` rule: ``m=<media> <port>[/<n>] <proto> <fmt>+``.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Media_fieldContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_media_field)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 535
        self.match(sdpParser.M)
        self.state = 536
        self.match(sdpParser.EQUALS)
        self.state = 537
        self.media()
        self.state = 538
        self.sp()
        self.state = 539
        self.port()
        self.state = 542
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( / integer )? — optional port count after a slash
        if _la==sdpParser.SLASH:
            self.state = 540
            self.match(sdpParser.SLASH)
            self.state = 541
            self.integer()
        self.state = 544
        self.sp()
        self.state = 545
        self.proto()
        self.state = 549
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( sp fmt )+ — at least one format token
        while True:
            self.state = 546
            self.sp()
            self.state = 547
            self.fmt()
            self.state = 551
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (_la==sdpParser.SPACE):
                break
        self.state = 553
        self.crlf()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UsernameContext(ParserRuleContext):
    """Parse-tree node for the ``username`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def non_ws_string(self):
        return self.getTypedRuleContext(sdpParser.Non_ws_stringContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_username

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterUsername"):
            return
        listener.enterUsername(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitUsername"):
            return
        listener.exitUsername(self)
def username(self):
    """Parse the ``username`` rule: a single non-whitespace string.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.UsernameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 34, self.RULE_username)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 555
        self.non_ws_string()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sess_idContext(ParserRuleContext):
    """Parse-tree node for the ``sess_id`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_sess_id

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterSess_id"):
            return
        listener.enterSess_id(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitSess_id"):
            return
        listener.exitSess_id(self)
def sess_id(self):
    """Parse the ``sess_id`` rule: one or more digits.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Sess_idContext(self, self._ctx, self.state)
    self.enterRule(localctx, 36, self.RULE_sess_id)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 558
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( digit )+ — the bitset test below checks lookahead against ZERO..NINE
        while True:
            self.state = 557
            self.digit()
            self.state = 560
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sess_versionContext(ParserRuleContext):
    """Parse-tree node for the ``sess_version`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_sess_version

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterSess_version"):
            return
        listener.enterSess_version(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitSess_version"):
            return
        listener.exitSess_version(self)
def sess_version(self):
    """Parse the ``sess_version`` rule: one or more digits.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Sess_versionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_sess_version)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 563
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( digit )+ — the bitset test below checks lookahead against ZERO..NINE
        while True:
            self.state = 562
            self.digit()
            self.state = 565
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NettypeContext(ParserRuleContext):
    """Parse-tree node for the ``nettype`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_nettype

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterNettype"):
            return
        listener.enterNettype(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitNettype"):
            return
        listener.exitNettype(self)
def nettype(self):
    """Parse the ``nettype`` rule: a single token.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.NettypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_nettype)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 567
        self.token()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AddrtypeContext(ParserRuleContext):
    """Parse-tree node for the ``addrtype`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_addrtype

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterAddrtype"):
            return
        listener.enterAddrtype(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitAddrtype"):
            return
        listener.exitAddrtype(self)
def addrtype(self):
    """Parse the ``addrtype`` rule: a single token.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.AddrtypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_addrtype)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 569
        self.token()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UriContext(ParserRuleContext):
    """Parse-tree node for the ``uri`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def uri_reference(self):
        return self.getTypedRuleContext(sdpParser.Uri_referenceContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_uri

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterUri"):
            return
        listener.enterUri(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitUri"):
            return
        listener.exitUri(self)
def uri(self):
    """Parse the ``uri`` rule: a single uri_reference.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.UriContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_uri)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 571
        self.uri_reference()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Email_addressContext(ParserRuleContext):
    """Parse-tree node for the ``email_address`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def address_and_comment(self):
        return self.getTypedRuleContext(sdpParser.Address_and_commentContext,0)

    def dispname_and_address(self):
        return self.getTypedRuleContext(sdpParser.Dispname_and_addressContext,0)

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_email_address

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterEmail_address"):
            return
        listener.enterEmail_address(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitEmail_address"):
            return
        listener.exitEmail_address(self)
def email_address(self):
    """Parse the ``email_address`` rule: one of address+comment,
    display-name+address, or a bare addr_spec, chosen by adaptive prediction.

    NOTE: generated by ANTLR; state numbers index the serialized ATN.
    """
    localctx = sdpParser.Email_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_email_address)
    try:
        self.state = 576
        self._errHandler.sync(self)
        # decision 22 in the ATN picks one of the three alternatives
        la_ = self._interp.adaptivePredict(self._input,22,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 573
            self.address_and_comment()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 574
            self.dispname_and_address()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 575
            self.addr_spec()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Address_and_commentContext(ParserRuleContext):
    """Parse-tree node for the ``address_and_comment`` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext,0)

    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)

    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)

    def sp(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.SpContext)
        return self.getTypedRuleContext(sdpParser.SpContext,i)

    def email_safe(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Email_safeContext)
        return self.getTypedRuleContext(sdpParser.Email_safeContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_address_and_comment

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterAddress_and_comment"):
            return
        listener.enterAddress_and_comment(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitAddress_and_comment"):
            return
        listener.exitAddress_and_comment(self)
def address_and_comment(self):
    """Parse the ``address_and_comment`` rule: ``addr_spec sp+ ( email_safe+ )``.

    NOTE: generated by ANTLR; state numbers index the serialized ATN, and the
    large bitset expression below is the generated lookahead test for the
    email_safe token set — do not edit it by hand.
    """
    localctx = sdpParser.Address_and_commentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_address_and_comment)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 578
        self.addr_spec()
        self.state = 580
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( sp )+ — at least one space before the comment
        while True:
            self.state = 579
            self.sp()
            self.state = 582
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (_la==sdpParser.SPACE):
                break
        self.state = 584
        self.match(sdpParser.LEFT_PAREN)
        self.state = 586
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( email_safe )+ — loop while lookahead is in the email_safe set
        while True:
            self.state = 585
            self.email_safe()
            self.state = 588
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << (sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << (sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) | (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
                break
        self.state = 590
        self.match(sdpParser.RIGHT_PAREN)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dispname_and_addressContext(ParserRuleContext):
    """Parse-tree node for the `dispname_and_address` rule.

    Exposes typed accessors for the display-name characters, the
    surrounding whitespace, and the angle-bracketed address.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LESS_THAN(self):
        # The single "<" token opening the address part.
        return self.getToken(sdpParser.LESS_THAN, 0)

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext, 0)

    def GREATER_THAN(self):
        # The single ">" token closing the address part.
        return self.getToken(sdpParser.GREATER_THAN, 0)

    def email_safe(self, i:int=None):
        # No index: the list of all email_safe children; otherwise the i-th.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Email_safeContext, i)
        return self.getTypedRuleContexts(sdpParser.Email_safeContext)

    def sp(self, i:int=None):
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SpContext, i)
        return self.getTypedRuleContexts(sdpParser.SpContext)

    def getRuleIndex(self):
        return sdpParser.RULE_dispname_and_address

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterDispname_and_address", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitDispname_and_address", None)
        if leave is not None:
            leave(self)
def dispname_and_address(self):
    """Parse the `dispname_and_address` rule: one or more `email_safe`
    children, one or more `sp` (SPACE) children, then "<" addr_spec ">".

    Auto-generated ANTLR-style rule method; the `self.state = N`
    assignments are ATN state numbers consumed by error handling.

    :returns: the populated Dispname_and_addressContext (with
        ``exception`` set instead of raising on a recognition error).
    """
    localctx = sdpParser.Dispname_and_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_dispname_and_address)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 593
        self._errHandler.sync(self)
        _alt = 1
        # (email_safe)+ : loop continuation decided by adaptive
        # prediction on decision 25.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 592
                self.email_safe()
            else:
                raise NoViableAltException(self)
            self.state = 595
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,25,self._ctx)
        self.state = 598
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (sp)+ : at least one SPACE; keep looping while lookahead is SPACE.
        while True:
            self.state = 597
            self.sp()
            self.state = 600
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not (_la==sdpParser.SPACE):
                break
        self.state = 602
        self.match(sdpParser.LESS_THAN)
        self.state = 603
        self.addr_spec()
        self.state = 604
        self.match(sdpParser.GREATER_THAN)
    except RecognitionException as re:
        # Standard generated recovery: record the error on the context,
        # report it, and resynchronize the token stream.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Phone_numberContext(ParserRuleContext):
    """Parse-tree node for the `phone_number` rule.

    Accessors cover the three shapes the rule can take: phone with a
    parenthesized comment, a display name with an angle-bracketed
    phone, or a bare phone.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def phone(self):
        return self.getTypedRuleContext(sdpParser.PhoneContext, 0)

    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)

    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)

    def sp(self, i:int=None):
        # No index: all `sp` children as a list; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SpContext, i)
        return self.getTypedRuleContexts(sdpParser.SpContext)

    def email_safe(self, i:int=None):
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Email_safeContext, i)
        return self.getTypedRuleContexts(sdpParser.Email_safeContext)

    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)

    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_phone_number

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterPhone_number", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitPhone_number", None)
        if leave is not None:
            leave(self)
def phone_number(self):
    """Parse the `phone_number` rule (three alternatives, chosen by
    adaptive prediction on decision 30):

    1. phone sp* "(" email_safe+ ")"
    2. email_safe+ "<" phone ">"
    3. phone

    Auto-generated ANTLR-style rule method; the very large inline
    conditions test whether the lookahead token is in the email_safe
    token set via 64-bit bitmask windows (offsets 1, 65, 129, 193).

    :returns: the populated Phone_numberContext (with ``exception``
        set instead of raising on a recognition error).
    """
    localctx = sdpParser.Phone_numberContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_phone_number)
    self._la = 0 # Token type
    try:
        self.state = 631
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,30,self._ctx)
        if la_ == 1:
            # Alternative 1: phone sp* "(" email_safe+ ")"
            self.enterOuterAlt(localctx, 1)
            self.state = 606
            self.phone()
            self.state = 610
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # (sp)* : zero or more SPACE tokens before the paren.
            while _la==sdpParser.SPACE:
                self.state = 607
                self.sp()
                self.state = 612
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 613
            self.match(sdpParser.LEFT_PAREN)
            self.state = 615
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # (email_safe)+ : loop while lookahead is in the email_safe set.
            while True:
                self.state = 614
                self.email_safe()
                self.state = 617
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << (sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << (sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) | (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
                    break
            self.state = 619
            self.match(sdpParser.RIGHT_PAREN)
            pass
        elif la_ == 2:
            # Alternative 2: email_safe+ "<" phone ">"
            self.enterOuterAlt(localctx, 2)
            self.state = 622
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # (email_safe)+ : same token-set membership test as above.
            while True:
                self.state = 621
                self.email_safe()
                self.state = 624
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not (((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << (sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << (sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) | (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
                    break
            self.state = 626
            self.match(sdpParser.LESS_THAN)
            self.state = 627
            self.phone()
            self.state = 628
            self.match(sdpParser.GREATER_THAN)
            pass
        elif la_ == 3:
            # Alternative 3: bare phone.
            self.enterOuterAlt(localctx, 3)
            self.state = 630
            self.phone()
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PhoneContext(ParserRuleContext):
    """Parse-tree node for the `phone` rule: an optional PLUS followed
    by digits interleaved with spaces and dashes."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # No index: the list of all digit children; otherwise the i-th.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.DigitContext, i)
        return self.getTypedRuleContexts(sdpParser.DigitContext)

    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)

    def sp(self, i:int=None):
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SpContext, i)
        return self.getTypedRuleContexts(sdpParser.SpContext)

    def DASH(self, i:int=None):
        # DASH is a plain token, so the unindexed form returns tokens.
        if i is not None:
            return self.getToken(sdpParser.DASH, i)
        return self.getTokens(sdpParser.DASH)

    def getRuleIndex(self):
        return sdpParser.RULE_phone

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterPhone", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitPhone", None)
        if leave is not None:
            leave(self)
def phone(self):
    """Parse the `phone` rule: optional PLUS, one digit, then one or
    more of (sp | DASH | digit), with the repetition driven by
    adaptive prediction (decision 33).

    Auto-generated ANTLR-style rule method; `self.state = N` values
    are ATN state numbers used by error handling.

    :returns: the populated PhoneContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.PhoneContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_phone)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 634
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (PLUS)? : optional leading "+".
        if _la==sdpParser.PLUS:
            self.state = 633
            self.match(sdpParser.PLUS)
        self.state = 636
        self.digit()
        self.state = 640
        self._errHandler.sync(self)
        _alt = 1
        # (sp | DASH | digit)+ : continuation decided by decision 33.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 640
                self._errHandler.sync(self)
                token = self._input.LA(1)
                if token in [sdpParser.SPACE]:
                    self.state = 637
                    self.sp()
                    pass
                elif token in [sdpParser.DASH]:
                    self.state = 638
                    self.match(sdpParser.DASH)
                    pass
                elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
                    self.state = 639
                    self.digit()
                    pass
                else:
                    raise NoViableAltException(self)
            else:
                raise NoViableAltException(self)
            self.state = 642
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,33,self._ctx)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Connection_addressContext(ParserRuleContext):
    """Parse-tree node for the `connection_address` rule: either a
    multicast or a unicast address."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def multicast_address(self):
        return self.getTypedRuleContext(sdpParser.Multicast_addressContext, 0)

    def unicast_address(self):
        return self.getTypedRuleContext(sdpParser.Unicast_addressContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_connection_address

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterConnection_address", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitConnection_address", None)
        if leave is not None:
            leave(self)
def connection_address(self):
    """Parse the `connection_address` rule: multicast_address or
    unicast_address, chosen by adaptive prediction (decision 34).

    :returns: the populated Connection_addressContext (with
        ``exception`` set instead of raising on a recognition error).
    """
    localctx = sdpParser.Connection_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_connection_address)
    try:
        self.state = 646
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 644
            self.multicast_address()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 645
            self.unicast_address()
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BwtypeContext(ParserRuleContext):
    """Parse-tree node for the `bwtype` rule (a single token child)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_bwtype

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterBwtype", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitBwtype", None)
        if leave is not None:
            leave(self)
def bwtype(self):
    """Parse the `bwtype` rule: a single `token` sub-rule.

    :returns: the populated BwtypeContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.BwtypeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_bwtype)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 648
        self.token()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BandwidthContext(ParserRuleContext):
    """Parse-tree node for the `bandwidth` rule (one or more digits)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # No index: the list of all digit children; otherwise the i-th.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.DigitContext, i)
        return self.getTypedRuleContexts(sdpParser.DigitContext)

    def getRuleIndex(self):
        return sdpParser.RULE_bandwidth

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterBandwidth", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitBandwidth", None)
        if leave is not None:
            leave(self)
def bandwidth(self):
    """Parse the `bandwidth` rule: one or more `digit` children.

    :returns: the populated BandwidthContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.BandwidthContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_bandwidth)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 651
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (digit)+ : loop while lookahead is a ZERO..NINE token
        # (bitmask membership test over the first 64 token types).
        while True:
            self.state = 650
            self.digit()
            self.state = 653
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Start_timeContext(ParserRuleContext):
    """Parse-tree node for the `start_time` rule: a `time` value or a
    literal ZERO."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def time(self):
        return self.getTypedRuleContext(sdpParser.TimeContext, 0)

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_start_time

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterStart_time", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitStart_time", None)
        if leave is not None:
            leave(self)
def start_time(self):
    """Parse the `start_time` rule: a `time` (starting with a non-zero
    digit) or the single token ZERO, selected on one token of lookahead.

    :returns: the populated Start_timeContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.Start_timeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_start_time)
    try:
        self.state = 657
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 655
            self.time()
            pass
        elif token in [sdpParser.ZERO]:
            self.enterOuterAlt(localctx, 2)
            self.state = 656
            self.match(sdpParser.ZERO)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Stop_timeContext(ParserRuleContext):
    """Parse-tree node for the `stop_time` rule: a `time` value or a
    literal ZERO."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def time(self):
        return self.getTypedRuleContext(sdpParser.TimeContext, 0)

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_stop_time

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterStop_time", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitStop_time", None)
        if leave is not None:
            leave(self)
def stop_time(self):
    """Parse the `stop_time` rule: a `time` (starting with a non-zero
    digit) or the single token ZERO, selected on one token of lookahead.

    :returns: the populated Stop_timeContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.Stop_timeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_stop_time)
    try:
        self.state = 661
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 659
            self.time()
            pass
        elif token in [sdpParser.ZERO]:
            self.enterOuterAlt(localctx, 2)
            self.state = 660
            self.match(sdpParser.ZERO)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TimeContext(ParserRuleContext):
    """Parse-tree node for the `time` rule: a positive digit followed
    by further digits."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pos_digit(self):
        return self.getTypedRuleContext(sdpParser.Pos_digitContext, 0)

    def digit(self, i:int=None):
        # No index: the list of all digit children; otherwise the i-th.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.DigitContext, i)
        return self.getTypedRuleContexts(sdpParser.DigitContext)

    def getRuleIndex(self):
        return sdpParser.RULE_time

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the hook only when the listener implements it.
        enter = getattr(listener, "enterTime", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitTime", None)
        if leave is not None:
            leave(self)
def time(self):
    """Parse the `time` rule: one `pos_digit`, then eight mandatory
    `digit` children, then one or more additional `digit` children
    (i.e. at least ten digits total).

    :returns: the populated TimeContext (with ``exception`` set
        instead of raising on a recognition error).
    """
    localctx = sdpParser.TimeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_time)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 663
        self.pos_digit()
        # Eight fixed digit positions.
        self.state = 664
        self.digit()
        self.state = 665
        self.digit()
        self.state = 666
        self.digit()
        self.state = 667
        self.digit()
        self.state = 668
        self.digit()
        self.state = 669
        self.digit()
        self.state = 670
        self.digit()
        self.state = 671
        self.digit()
        self.state = 673
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (digit)+ : loop while lookahead is a ZERO..NINE token.
        while True:
            self.state = 672
            self.digit()
            self.state = 675
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Repeat_intervalContext(ParserRuleContext):
    """Parse-tree node for the 'repeat_interval' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pos_digit(self):
        return self.getTypedRuleContext(sdpParser.Pos_digitContext,0)

    def digit(self, i:int=None):
        # i is None -> all digit children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        else:
            return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def fixed_len_time_unit(self):
        # Optional trailing time-unit child (may be absent).
        return self.getTypedRuleContext(sdpParser.Fixed_len_time_unitContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_repeat_interval

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterRepeat_interval" ):
            listener.enterRepeat_interval(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitRepeat_interval" ):
            listener.exitRepeat_interval(self)
def repeat_interval(self):
    """Parse rule 'repeat_interval': pos_digit digit* fixed_len_time_unit?.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Repeat_intervalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_repeat_interval)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 677
        self.pos_digit()
        self.state = 681
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (digit)* loop: membership test against the digit token bitmask.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0):
            self.state = 678
            self.digit()
            self.state = 683
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 685
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional unit: only if lookahead is one of D/H/M/S (offset-72 mask).
        if ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (sdpParser.D - 72)) | (1 << (sdpParser.H - 72)) | (1 << (sdpParser.M - 72)) | (1 << (sdpParser.S - 72)))) != 0):
            self.state = 684
            self.fixed_len_time_unit()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Typed_timeContext(ParserRuleContext):
    """Parse-tree node for the 'typed_time' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # i is None -> all digit children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        else:
            return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def fixed_len_time_unit(self):
        # Optional trailing time-unit child (may be absent).
        return self.getTypedRuleContext(sdpParser.Fixed_len_time_unitContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_typed_time

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTyped_time" ):
            listener.enterTyped_time(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTyped_time" ):
            listener.exitTyped_time(self)
def typed_time(self):
    """Parse rule 'typed_time': digit+ fixed_len_time_unit?.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Typed_timeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_typed_time)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 688
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (digit)+ loop: at least one digit, then repeat while lookahead
        # stays in the digit token bitmask.
        while True:
            self.state = 687
            self.digit()
            self.state = 690
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
        self.state = 693
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional unit: only if lookahead is one of D/H/M/S (offset-72 mask).
        if ((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (sdpParser.D - 72)) | (1 << (sdpParser.H - 72)) | (1 << (sdpParser.M - 72)) | (1 << (sdpParser.S - 72)))) != 0):
            self.state = 692
            self.fixed_len_time_unit()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Fixed_len_time_unitContext(ParserRuleContext):
    """Parse-tree node for 'fixed_len_time_unit': one of D/H/M/S tokens."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def D(self):
        return self.getToken(sdpParser.D, 0)

    def H(self):
        return self.getToken(sdpParser.H, 0)

    def M(self):
        return self.getToken(sdpParser.M, 0)

    def S(self):
        return self.getToken(sdpParser.S, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_fixed_len_time_unit

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFixed_len_time_unit" ):
            listener.enterFixed_len_time_unit(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFixed_len_time_unit" ):
            listener.exitFixed_len_time_unit(self)
def fixed_len_time_unit(self):
    """Parse rule 'fixed_len_time_unit': match exactly one of D | H | M | S.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Fixed_len_time_unitContext(self, self._ctx, self.state)
    self.enterRule(localctx, 72, self.RULE_fixed_len_time_unit)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 695
        _la = self._input.LA(1)
        # Token-set match: lookahead must be D/H/M/S, else inline recovery.
        if not(((((_la - 72)) & ~0x3f) == 0 and ((1 << (_la - 72)) & ((1 << (sdpParser.D - 72)) | (1 << (sdpParser.H - 72)) | (1 << (sdpParser.M - 72)) | (1 << (sdpParser.S - 72)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Key_typeContext(ParserRuleContext):
    """Parse-tree node for the 'key_type' rule (ANTLR-generated).

    Accessors below cover the letter tokens for the four alternatives
    parsed by ``key_type()``: p-r-o-m-p-t, c-l-e-a-r:<text>,
    b-a-s-e-6-4:<base64>, and u-r-i:<uri>.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- letter/token accessors -------------------------------------
    def P(self, i:int=None):
        # 'prompt' contains P twice, hence the indexed accessor.
        if i is None:
            return self.getTokens(sdpParser.P)
        else:
            return self.getToken(sdpParser.P, i)

    def R(self):
        return self.getToken(sdpParser.R, 0)

    def O(self):
        return self.getToken(sdpParser.O, 0)

    def M(self):
        return self.getToken(sdpParser.M, 0)

    def T(self):
        return self.getToken(sdpParser.T, 0)

    def C(self):
        return self.getToken(sdpParser.C, 0)

    def L(self):
        return self.getToken(sdpParser.L, 0)

    def E(self):
        return self.getToken(sdpParser.E, 0)

    def A(self):
        return self.getToken(sdpParser.A, 0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    # --- payload sub-rules ------------------------------------------
    def text(self):
        # Payload of the 'clear:' alternative.
        return self.getTypedRuleContext(sdpParser.TextContext,0)

    def B(self):
        return self.getToken(sdpParser.B, 0)

    def S(self):
        return self.getToken(sdpParser.S, 0)

    def base64(self):
        # Payload of the 'base64:' alternative.
        return self.getTypedRuleContext(sdpParser.Base64Context,0)

    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)

    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)

    def U(self):
        return self.getToken(sdpParser.U, 0)

    def I(self):
        return self.getToken(sdpParser.I, 0)

    def uri(self):
        # Payload of the 'uri:' alternative.
        return self.getTypedRuleContext(sdpParser.UriContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_key_type

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterKey_type" ):
            listener.enterKey_type(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitKey_type" ):
            listener.exitKey_type(self)
def key_type(self):
    """Parse rule 'key_type', dispatching on the first token:

    * P -> literal letters 'prompt'
    * C -> 'clear' ':' text
    * B -> 'base' '6' '4' ':' base64
    * U -> 'uri' ':' uri

    Anything else raises NoViableAltException.
    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Key_typeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 74, self.RULE_key_type)
    try:
        self.state = 724
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.P]:
            # Alternative 1: 'prompt'
            self.enterOuterAlt(localctx, 1)
            self.state = 697
            self.match(sdpParser.P)
            self.state = 698
            self.match(sdpParser.R)
            self.state = 699
            self.match(sdpParser.O)
            self.state = 700
            self.match(sdpParser.M)
            self.state = 701
            self.match(sdpParser.P)
            self.state = 702
            self.match(sdpParser.T)
            pass
        elif token in [sdpParser.C]:
            # Alternative 2: 'clear' ':' text
            self.enterOuterAlt(localctx, 2)
            self.state = 703
            self.match(sdpParser.C)
            self.state = 704
            self.match(sdpParser.L)
            self.state = 705
            self.match(sdpParser.E)
            self.state = 706
            self.match(sdpParser.A)
            self.state = 707
            self.match(sdpParser.R)
            self.state = 708
            self.match(sdpParser.COLON)
            self.state = 709
            self.text()
            pass
        elif token in [sdpParser.B]:
            # Alternative 3: 'base' '6' '4' ':' base64
            self.enterOuterAlt(localctx, 3)
            self.state = 710
            self.match(sdpParser.B)
            self.state = 711
            self.match(sdpParser.A)
            self.state = 712
            self.match(sdpParser.S)
            self.state = 713
            self.match(sdpParser.E)
            self.state = 714
            self.match(sdpParser.SIX)
            self.state = 715
            self.match(sdpParser.FOUR)
            self.state = 716
            self.match(sdpParser.COLON)
            self.state = 718
            self.base64()
            pass
        elif token in [sdpParser.U]:
            # Alternative 4: 'uri' ':' uri
            self.enterOuterAlt(localctx, 4)
            self.state = 719
            self.match(sdpParser.U)
            self.state = 720
            self.match(sdpParser.R)
            self.state = 721
            self.match(sdpParser.I)
            self.state = 722
            self.match(sdpParser.COLON)
            self.state = 723
            self.uri()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Base64Context(ParserRuleContext):
    """Parse-tree node for the 'base64' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def base64_unit(self, i:int=None):
        # i is None -> all 4-char base64 unit children; otherwise the i-th.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Base64_unitContext)
        else:
            return self.getTypedRuleContext(sdpParser.Base64_unitContext,i)

    def base64_pad(self):
        # Optional padded final group (may be absent).
        return self.getTypedRuleContext(sdpParser.Base64_padContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_base64

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBase64" ):
            listener.enterBase64(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBase64" ):
            listener.exitBase64(self)
def base64(self):
    """Parse rule 'base64': base64_unit* base64_pad?.

    Uses adaptive prediction (decision 44) to decide whether another
    4-character unit follows, since units and the pad group share a prefix.
    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Base64Context(self, self._ctx, self.state)
    self.enterRule(localctx, 76, self.RULE_base64)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 729
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,44,self._ctx)
        # (base64_unit)* loop driven by ATN prediction, not a simple
        # token-set test (alt 2 / INVALID ends the loop).
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 726
                self.base64_unit()
            self.state = 731
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,44,self._ctx)
        self.state = 733
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional pad group if lookahead is any base64 character
        # (PLUS, SLASH, digit, or upper/lower-case letter token).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.PLUS) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 69)) & ~0x3f) == 0 and ((1 << (_la - 69)) & ((1 << (sdpParser.A - 69)) | (1 << (sdpParser.B - 69)) | (1 << (sdpParser.C - 69)) | (1 << (sdpParser.D - 69)) | (1 << (sdpParser.E - 69)) | (1 << (sdpParser.F - 69)) | (1 << (sdpParser.G - 69)) | (1 << (sdpParser.H - 69)) | (1 << (sdpParser.I - 69)) | (1 << (sdpParser.J - 69)) | (1 << (sdpParser.K - 69)) | (1 << (sdpParser.L - 69)) | (1 << (sdpParser.M - 69)) | (1 << (sdpParser.N - 69)) | (1 << (sdpParser.O - 69)) | (1 << (sdpParser.P - 69)) | (1 << (sdpParser.Q - 69)) | (1 << (sdpParser.R - 69)) | (1 << (sdpParser.S - 69)) | (1 << (sdpParser.T - 69)) | (1 << (sdpParser.U - 69)) | (1 << (sdpParser.V - 69)) | (1 << (sdpParser.W - 69)) | (1 << (sdpParser.X - 69)) | (1 << (sdpParser.Y - 69)) | (1 << (sdpParser.Z - 69)))) != 0):
            self.state = 732
            self.base64_pad()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Base64_unitContext(ParserRuleContext):
    """Parse-tree node for 'base64_unit': a group of four base64 chars."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def base64_char(self, i:int=None):
        # i is None -> all four char children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Base64_charContext)
        else:
            return self.getTypedRuleContext(sdpParser.Base64_charContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_base64_unit

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBase64_unit" ):
            listener.enterBase64_unit(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBase64_unit" ):
            listener.exitBase64_unit(self)
def base64_unit(self):
    """Parse rule 'base64_unit': exactly four base64_char sub-rules.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Base64_unitContext(self, self._ctx, self.state)
    self.enterRule(localctx, 78, self.RULE_base64_unit)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 735
        self.base64_char()
        self.state = 736
        self.base64_char()
        self.state = 737
        self.base64_char()
        self.state = 738
        self.base64_char()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Base64_padContext(ParserRuleContext):
    """Parse-tree node for 'base64_pad': trailing '='-padded group."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def base64_char(self, i:int=None):
        # i is None -> all char children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Base64_charContext)
        else:
            return self.getTypedRuleContext(sdpParser.Base64_charContext,i)

    def EQUALS(self, i:int=None):
        # One or two '=' pad tokens, hence the indexed accessor.
        if i is None:
            return self.getTokens(sdpParser.EQUALS)
        else:
            return self.getToken(sdpParser.EQUALS, i)

    def getRuleIndex(self):
        return sdpParser.RULE_base64_pad

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBase64_pad" ):
            listener.enterBase64_pad(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBase64_pad" ):
            listener.exitBase64_pad(self)
def base64_pad(self):
    """Parse rule 'base64_pad' via adaptive prediction (decision 46):

    * alt 1: two base64_chars followed by '=' '='
    * alt 2: three base64_chars followed by a single '='

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Base64_padContext(self, self._ctx, self.state)
    self.enterRule(localctx, 80, self.RULE_base64_pad)
    try:
        self.state = 752
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,46,self._ctx)
        if la_ == 1:
            # cc==
            self.enterOuterAlt(localctx, 1)
            self.state = 740
            self.base64_char()
            self.state = 741
            self.base64_char()
            self.state = 743
            self.match(sdpParser.EQUALS)
            self.state = 744
            self.match(sdpParser.EQUALS)
            pass
        elif la_ == 2:
            # ccc=
            self.enterOuterAlt(localctx, 2)
            self.state = 746
            self.base64_char()
            self.state = 747
            self.base64_char()
            self.state = 748
            self.base64_char()
            self.state = 750
            self.match(sdpParser.EQUALS)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Base64_charContext(ParserRuleContext):
    """Parse-tree node for 'base64_char': alpha | digit | '+' | '/'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def alpha(self):
        return self.getTypedRuleContext(sdpParser.AlphaContext,0)

    def digit(self):
        return self.getTypedRuleContext(sdpParser.DigitContext,0)

    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)

    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_base64_char

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterBase64_char" ):
            listener.enterBase64_char(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitBase64_char" ):
            listener.exitBase64_char(self)
def base64_char(self):
    """Parse rule 'base64_char': alpha | digit | '+' | '/'.

    Dispatches on the lookahead token; any other token raises
    NoViableAltException.
    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Base64_charContext(self, self._ctx, self.state)
    self.enterRule(localctx, 82, self.RULE_base64_char)
    try:
        self.state = 758
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alternative 1: any letter token (upper- or lower-case).
        if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.enterOuterAlt(localctx, 1)
            self.state = 754
            self.alpha()
            pass
        # Alternative 2: any digit token.
        elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 755
            self.digit()
            pass
        elif token in [sdpParser.PLUS]:
            self.enterOuterAlt(localctx, 3)
            self.state = 756
            self.match(sdpParser.PLUS)
            pass
        elif token in [sdpParser.SLASH]:
            self.enterOuterAlt(localctx, 4)
            self.state = 757
            self.match(sdpParser.SLASH)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AttributeContext(ParserRuleContext):
    """Parse-tree node for 'attribute': att_field (':' att_value)?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def att_field(self):
        return self.getTypedRuleContext(sdpParser.Att_fieldContext,0)

    def COLON(self):
        # Present only in the field:value form.
        return self.getToken(sdpParser.COLON, 0)

    def att_value(self):
        # Present only in the field:value form.
        return self.getTypedRuleContext(sdpParser.Att_valueContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_attribute

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAttribute" ):
            listener.enterAttribute(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAttribute" ):
            listener.exitAttribute(self)
def attribute(self):
    """Parse rule 'attribute' via adaptive prediction (decision 48):

    * alt 1: att_field ':' att_value
    * alt 2: bare att_field (flag attribute)

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.AttributeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 84, self.RULE_attribute)
    try:
        self.state = 765
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,48,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 760
            self.att_field()
            self.state = 761
            self.match(sdpParser.COLON)
            self.state = 762
            self.att_value()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 764
            self.att_field()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Att_fieldContext(ParserRuleContext):
    """Parse-tree node for 'att_field': a single token sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_att_field

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAtt_field" ):
            listener.enterAtt_field(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAtt_field" ):
            listener.exitAtt_field(self)
def att_field(self):
    """Parse rule 'att_field': delegates to the 'token' sub-rule.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Att_fieldContext(self, self._ctx, self.state)
    self.enterRule(localctx, 86, self.RULE_att_field)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 767
        self.token()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Att_valueContext(ParserRuleContext):
    """Parse-tree node for 'att_value': a byte_string sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def byte_string(self):
        return self.getTypedRuleContext(sdpParser.Byte_stringContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_att_value

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAtt_value" ):
            listener.enterAtt_value(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAtt_value" ):
            listener.exitAtt_value(self)
def att_value(self):
    """Parse rule 'att_value': delegates to the 'byte_string' sub-rule.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Att_valueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 88, self.RULE_att_value)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 769
        self.byte_string()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MediaContext(ParserRuleContext):
    """Parse-tree node for 'media': a single token sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_media

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMedia" ):
            listener.enterMedia(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMedia" ):
            listener.exitMedia(self)
def media(self):
    """Parse rule 'media': delegates to the 'token' sub-rule.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.MediaContext(self, self._ctx, self.state)
    self.enterRule(localctx, 90, self.RULE_media)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 771
        self.token()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FmtContext(ParserRuleContext):
    """Parse-tree node for 'fmt': a single token sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self):
        return self.getTypedRuleContext(sdpParser.TokenContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_fmt

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFmt" ):
            listener.enterFmt(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFmt" ):
            listener.exitFmt(self)
def fmt(self):
    """Parse rule 'fmt': delegates to the 'token' sub-rule.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.FmtContext(self, self._ctx, self.state)
    self.enterRule(localctx, 92, self.RULE_fmt)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 773
        self.token()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ProtoContext(ParserRuleContext):
    """Parse-tree node for 'proto': token ('/' token)*."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def token(self, i:int=None):
        # i is None -> all token children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.TokenContext)
        else:
            return self.getTypedRuleContext(sdpParser.TokenContext,i)

    def SLASH(self, i:int=None):
        # Zero or more '/' separators, hence the indexed accessor.
        if i is None:
            return self.getTokens(sdpParser.SLASH)
        else:
            return self.getToken(sdpParser.SLASH, i)

    def getRuleIndex(self):
        return sdpParser.RULE_proto

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterProto" ):
            listener.enterProto(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitProto" ):
            listener.exitProto(self)
def proto(self):
    """Parse rule 'proto': token ('/' token)*.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.ProtoContext(self, self._ctx, self.state)
    self.enterRule(localctx, 94, self.RULE_proto)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 775
        self.token()
        self.state = 780
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ('/' token)* loop: repeat while the lookahead is a SLASH.
        while _la==sdpParser.SLASH:
            self.state = 776
            self.match(sdpParser.SLASH)
            self.state = 777
            self.token()
            self.state = 782
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PortContext(ParserRuleContext):
    """Parse-tree node for 'port': one or more digit sub-rules."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # i is None -> all digit children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        else:
            return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_port

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPort" ):
            listener.enterPort(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPort" ):
            listener.exitPort(self)
def port(self):
    """Parse rule 'port': digit+ (at least one digit).

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.PortContext(self, self._ctx, self.state)
    self.enterRule(localctx, 96, self.RULE_port)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 784
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (digit)+ loop: repeat while lookahead is in the digit token set.
        while True:
            self.state = 783
            self.digit()
            self.state = 786
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Unicast_addressContext(ParserRuleContext):
    """Parse-tree node for 'unicast_address': one of four address forms."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ip4_address(self):
        return self.getTypedRuleContext(sdpParser.Ip4_addressContext,0)

    def ip6_address(self):
        return self.getTypedRuleContext(sdpParser.Ip6_addressContext,0)

    def fqdn(self):
        return self.getTypedRuleContext(sdpParser.FqdnContext,0)

    def extn_addr(self):
        return self.getTypedRuleContext(sdpParser.Extn_addrContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_unicast_address

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterUnicast_address" ):
            listener.enterUnicast_address(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitUnicast_address" ):
            listener.exitUnicast_address(self)
def unicast_address(self):
    """Parse rule 'unicast_address' via adaptive prediction (decision 51):
    ip4_address | ip6_address | fqdn | extn_addr.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Unicast_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 98, self.RULE_unicast_address)
    try:
        self.state = 792
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 788
            self.ip4_address()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 789
            self.ip6_address()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 790
            self.fqdn()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 791
            self.extn_addr()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Multicast_addressContext(ParserRuleContext):
    """Parse-tree node for 'multicast_address': one of four address forms."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ip4_multicast(self):
        return self.getTypedRuleContext(sdpParser.Ip4_multicastContext,0)

    def ip6_multicast(self):
        return self.getTypedRuleContext(sdpParser.Ip6_multicastContext,0)

    def fqdn(self):
        return self.getTypedRuleContext(sdpParser.FqdnContext,0)

    def extn_addr(self):
        return self.getTypedRuleContext(sdpParser.Extn_addrContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_multicast_address

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMulticast_address" ):
            listener.enterMulticast_address(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMulticast_address" ):
            listener.exitMulticast_address(self)
def multicast_address(self):
    """Parse rule 'multicast_address' via adaptive prediction (decision 52):
    ip4_multicast | ip6_multicast | fqdn | extn_addr.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Multicast_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 100, self.RULE_multicast_address)
    try:
        self.state = 798
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,52,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 794
            self.ip4_multicast()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 795
            self.ip6_multicast()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 796
            self.fqdn()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 797
            self.extn_addr()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ip4_multicastContext(ParserRuleContext):
    """Parse-tree node for 'ip4_multicast':
    m1 '.' decimal_uchar '.' decimal_uchar '.' decimal_uchar '/' ttl ('/' integer)?.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def m1(self):
        # First address octet (multicast range prefix).
        return self.getTypedRuleContext(sdpParser.M1Context,0)

    def SLASH(self, i:int=None):
        # One mandatory '/' before ttl, optionally a second before integer.
        if i is None:
            return self.getTokens(sdpParser.SLASH)
        else:
            return self.getToken(sdpParser.SLASH, i)

    def ttl(self):
        return self.getTypedRuleContext(sdpParser.TtlContext,0)

    def integer(self):
        # Optional trailing count after the second '/'.
        return self.getTypedRuleContext(sdpParser.IntegerContext,0)

    def PERIOD(self, i:int=None):
        # Three '.' separators, hence the indexed accessor.
        if i is None:
            return self.getTokens(sdpParser.PERIOD)
        else:
            return self.getToken(sdpParser.PERIOD, i)

    def decimal_uchar(self, i:int=None):
        # i is None -> all three octet children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Decimal_ucharContext)
        else:
            return self.getTypedRuleContext(sdpParser.Decimal_ucharContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_ip4_multicast

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIp4_multicast" ):
            listener.enterIp4_multicast(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIp4_multicast" ):
            listener.exitIp4_multicast(self)
def ip4_multicast(self):
    """Parse rule 'ip4_multicast':
    m1 '.' decimal_uchar '.' decimal_uchar '.' decimal_uchar '/' ttl
    ('/' integer)?.

    NOTE: ANTLR-generated; ``self.state`` values index the serialized ATN.
    """
    localctx = sdpParser.Ip4_multicastContext(self, self._ctx, self.state)
    self.enterRule(localctx, 102, self.RULE_ip4_multicast)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 800
        self.m1()
        self.state = 801
        self.match(sdpParser.PERIOD)
        self.state = 802
        self.decimal_uchar()
        self.state = 804
        self.match(sdpParser.PERIOD)
        self.state = 805
        self.decimal_uchar()
        self.state = 807
        self.match(sdpParser.PERIOD)
        self.state = 808
        self.decimal_uchar()
        self.state = 810
        self.match(sdpParser.SLASH)
        self.state = 811
        self.ttl()
        self.state = 814
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional '/' integer suffix.
        if _la==sdpParser.SLASH:
            self.state = 812
            self.match(sdpParser.SLASH)
            self.state = 813
            self.integer()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class M1Context(ParserRuleContext):
    """Parse-tree node for 'm1' (first multicast octet):
    '2' '2' [4-9]  |  '2' '3' digit.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)

    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)

    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)

    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)

    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)

    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)

    def TWO(self, i:int=None):
        # '22x' has TWO twice, hence the indexed accessor.
        if i is None:
            return self.getTokens(sdpParser.TWO)
        else:
            return self.getToken(sdpParser.TWO, i)

    def digit(self):
        # Third character of the '23x' alternative.
        return self.getTypedRuleContext(sdpParser.DigitContext,0)

    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_m1

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterM1" ):
            listener.enterM1(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitM1" ):
            listener.exitM1(self)
def m1(self):
    """ANTLR-generated rule method: m1 (first component of an
    ip4_multicast address).

    Alt 1 matches TWO TWO followed by one of FOUR..NINE; alt 2 matches
    TWO THREE followed by any digit.  ATN state numbers are generated —
    do not edit by hand.
    """
    localctx = sdpParser.M1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 104, self.RULE_m1)
    self._la = 0 # Token type
    try:
        self.state = 824
        self._errHandler.sync(self)
        # Adaptive LL(*) prediction selects between the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,54,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 816
            self.match(sdpParser.TWO)
            self.state = 817
            self.match(sdpParser.TWO)
            self.state = 819
            _la = self._input.LA(1)
            # Set-membership test via bitmask: lookahead must be FOUR..NINE.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 820
            self.match(sdpParser.TWO)
            self.state = 821
            self.match(sdpParser.THREE)
            self.state = 823
            self.digit()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ip6_multicastContext(ParserRuleContext):
    """Parse-tree node produced by the ``ip6_multicast`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hexpart(self):
        return self.getTypedRuleContext(sdpParser.HexpartContext, 0)

    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)

    def integer(self):
        return self.getTypedRuleContext(sdpParser.IntegerContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ip6_multicast

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterIp6_multicast
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitIp6_multicast
        except AttributeError:
            return
        hook(self)
def ip6_multicast(self):
    """ANTLR-generated rule method: ip6_multicast.

    Parses hexpart followed by an optional '/' integer suffix.
    ATN state numbers are generated — do not edit by hand.
    """
    localctx = sdpParser.Ip6_multicastContext(self, self._ctx, self.state)
    self.enterRule(localctx, 106, self.RULE_ip6_multicast)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 826
        self.hexpart()
        self.state = 829
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing "/<integer>" part.
        if _la==sdpParser.SLASH:
            self.state = 827
            self.match(sdpParser.SLASH)
            self.state = 828
            self.integer()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TtlContext(ParserRuleContext):
    """Parse-tree node produced by the ``ttl`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pos_digit(self):
        return self.getTypedRuleContext(sdpParser.Pos_digitContext, 0)

    def digit(self, i: int = None):
        # All digit children when i is omitted, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext, i)

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ttl

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterTtl
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitTtl
        except AttributeError:
            return
        hook(self)
def ttl(self):
    """ANTLR-generated rule method: ttl.

    Alt 1: pos_digit followed by up to two more digits; alt 2: a lone
    ZERO.  ATN state numbers and decision indices are generated —
    do not edit by hand.
    """
    localctx = sdpParser.TtlContext(self, self._ctx, self.state)
    self.enterRule(localctx, 108, self.RULE_ttl)
    self._la = 0 # Token type
    try:
        self.state = 841
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 831
            self.pos_digit()
            self.state = 838
            self._errHandler.sync(self)
            # Predict whether two more digits follow, or at most one.
            la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
            if la_ == 1:
                self.state = 832
                self.digit()
                self.state = 833
                self.digit()
                pass
            elif la_ == 2:
                self.state = 836
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional single trailing digit (bitmask tests ZERO..NINE).
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0):
                    self.state = 835
                    self.digit()
                pass
            pass
        elif token in [sdpParser.ZERO]:
            self.enterOuterAlt(localctx, 2)
            self.state = 840
            self.match(sdpParser.ZERO)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FqdnContext(ParserRuleContext):
    """Parse-tree node produced by the ``fqdn`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def alpha_numeric(self, i: int = None):
        # All alpha_numeric children when i is omitted, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Alpha_numericContext)
        return self.getTypedRuleContext(sdpParser.Alpha_numericContext, i)

    def DASH(self, i: int = None):
        return self.getTokens(sdpParser.DASH) if i is None else self.getToken(sdpParser.DASH, i)

    def PERIOD(self, i: int = None):
        return self.getTokens(sdpParser.PERIOD) if i is None else self.getToken(sdpParser.PERIOD, i)

    def getRuleIndex(self):
        return sdpParser.RULE_fqdn

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterFqdn
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitFqdn
        except AttributeError:
            return
        hook(self)
def fqdn(self):
    """ANTLR-generated rule method: fqdn.

    Parses four or more characters, each one of alpha_numeric, '-' or
    '.': three fixed repetitions followed by a one-or-more loop.  ATN
    state numbers are generated — do not edit by hand.
    """
    localctx = sdpParser.FqdnContext(self, self._ctx, self.state)
    self.enterRule(localctx, 110, self.RULE_fqdn)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        # First character: alpha_numeric | DASH | PERIOD.
        self.state = 846
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.state = 843
            self.alpha_numeric()
            pass
        elif token in [sdpParser.DASH]:
            self.state = 844
            self.match(sdpParser.DASH)
            pass
        elif token in [sdpParser.PERIOD]:
            self.state = 845
            self.match(sdpParser.PERIOD)
            pass
        else:
            raise NoViableAltException(self)
        # Second character: same token choice.
        self.state = 851
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.state = 848
            self.alpha_numeric()
            pass
        elif token in [sdpParser.DASH]:
            self.state = 849
            self.match(sdpParser.DASH)
            pass
        elif token in [sdpParser.PERIOD]:
            self.state = 850
            self.match(sdpParser.PERIOD)
            pass
        else:
            raise NoViableAltException(self)
        # Third character: same token choice.
        self.state = 856
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.state = 853
            self.alpha_numeric()
            pass
        elif token in [sdpParser.DASH]:
            self.state = 854
            self.match(sdpParser.DASH)
            pass
        elif token in [sdpParser.PERIOD]:
            self.state = 855
            self.match(sdpParser.PERIOD)
            pass
        else:
            raise NoViableAltException(self)
        # Fourth and subsequent characters: one-or-more (+) loop.
        self.state = 861
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while True:
            self.state = 861
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
                self.state = 858
                self.alpha_numeric()
                pass
            elif token in [sdpParser.DASH]:
                self.state = 859
                self.match(sdpParser.DASH)
                pass
            elif token in [sdpParser.PERIOD]:
                self.state = 860
                self.match(sdpParser.PERIOD)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 863
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Leave the loop once the lookahead is no longer in the
            # alpha_numeric/DASH/PERIOD set (two-word bitmask test).
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 69)) & ~0x3f) == 0 and ((1 << (_la - 69)) & ((1 << (sdpParser.A - 69)) | (1 << (sdpParser.B - 69)) | (1 << (sdpParser.C - 69)) | (1 << (sdpParser.D - 69)) | (1 << (sdpParser.E - 69)) | (1 << (sdpParser.F - 69)) | (1 << (sdpParser.G - 69)) | (1 << (sdpParser.H - 69)) | (1 << (sdpParser.I - 69)) | (1 << (sdpParser.J - 69)) | (1 << (sdpParser.K - 69)) | (1 << (sdpParser.L - 69)) | (1 << (sdpParser.M - 69)) | (1 << (sdpParser.N - 69)) | (1 << (sdpParser.O - 69)) | (1 << (sdpParser.P - 69)) | (1 << (sdpParser.Q - 69)) | (1 << (sdpParser.R - 69)) | (1 << (sdpParser.S - 69)) | (1 << (sdpParser.T - 69)) | (1 << (sdpParser.U - 69)) | (1 << (sdpParser.V - 69)) | (1 << (sdpParser.W - 69)) | (1 << (sdpParser.X - 69)) | (1 << (sdpParser.Y - 69)) | (1 << (sdpParser.Z - 69)))) != 0)):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ip4_addressContext(ParserRuleContext):
    """Parse-tree node produced by the ``ip4_address`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def b1(self):
        return self.getTypedRuleContext(sdpParser.B1Context, 0)

    def PERIOD(self, i: int = None):
        # All PERIOD tokens when i is omitted, otherwise the i-th one.
        return self.getTokens(sdpParser.PERIOD) if i is None else self.getToken(sdpParser.PERIOD, i)

    def decimal_uchar(self, i: int = None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Decimal_ucharContext)
        return self.getTypedRuleContext(sdpParser.Decimal_ucharContext, i)

    def getRuleIndex(self):
        return sdpParser.RULE_ip4_address

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterIp4_address
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitIp4_address
        except AttributeError:
            return
        hook(self)
def ip4_address(self):
    """ANTLR-generated rule method: ip4_address.

    Parses: b1 '.' decimal_uchar '.' decimal_uchar '.' decimal_uchar.
    ATN state numbers are generated — do not edit by hand.
    """
    localctx = sdpParser.Ip4_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 112, self.RULE_ip4_address)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 865
        self.b1()
        self.state = 866
        self.match(sdpParser.PERIOD)
        self.state = 867
        self.decimal_uchar()
        self.state = 869
        self.match(sdpParser.PERIOD)
        self.state = 870
        self.decimal_uchar()
        self.state = 872
        self.match(sdpParser.PERIOD)
        self.state = 873
        self.decimal_uchar()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class B1Context(ParserRuleContext):
    """Parse-tree node produced by the ``b1`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def decimal_uchar(self):
        return self.getTypedRuleContext(sdpParser.Decimal_ucharContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_b1

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterB1
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitB1
        except AttributeError:
            return
        hook(self)
def b1(self):
    """ANTLR-generated rule method: b1 — a single decimal_uchar."""
    localctx = sdpParser.B1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 114, self.RULE_b1)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 875
        self.decimal_uchar()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ip6_addressContext(ParserRuleContext):
    """Parse-tree node produced by the ``ip6_address`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hexpart(self):
        return self.getTypedRuleContext(sdpParser.HexpartContext, 0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def ip4_address(self):
        return self.getTypedRuleContext(sdpParser.Ip4_addressContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ip6_address

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterIp6_address
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitIp6_address
        except AttributeError:
            return
        hook(self)
def ip6_address(self):
    """ANTLR-generated rule method: ip6_address.

    Parses hexpart followed by an optional ':' ip4_address tail.
    ATN state numbers are generated — do not edit by hand.
    """
    localctx = sdpParser.Ip6_addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 116, self.RULE_ip6_address)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 877
        self.hexpart()
        self.state = 880
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional ":<ip4_address>" suffix.
        if _la==sdpParser.COLON:
            self.state = 878
            self.match(sdpParser.COLON)
            self.state = 879
            self.ip4_address()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HexpartContext(ParserRuleContext):
    """Parse-tree node produced by the ``hexpart`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hexseq(self, i: int = None):
        # All hexseq children when i is omitted, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.HexseqContext)
        return self.getTypedRuleContext(sdpParser.HexseqContext, i)

    def COLON(self, i: int = None):
        return self.getTokens(sdpParser.COLON) if i is None else self.getToken(sdpParser.COLON, i)

    def getRuleIndex(self):
        return sdpParser.RULE_hexpart

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterHexpart
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitHexpart
        except AttributeError:
            return
        hook(self)
def hexpart(self):
    """ANTLR-generated rule method: hexpart.

    Alternatives: a plain hexseq; hexseq '::' with an optional trailing
    hexseq; or '::' with an optional trailing hexseq.  ATN state numbers
    and decision indices are generated — do not edit by hand.
    """
    localctx = sdpParser.HexpartContext(self, self._ctx, self.state)
    self.enterRule(localctx, 118, self.RULE_hexpart)
    self._la = 0 # Token type
    try:
        self.state = 896
        self._errHandler.sync(self)
        # Adaptive LL(*) prediction selects among the three alternatives.
        la_ = self._interp.adaptivePredict(self._input,67,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 882
            self.hexseq()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 883
            self.hexseq()
            self.state = 884
            self.match(sdpParser.COLON)
            self.state = 885
            self.match(sdpParser.COLON)
            self.state = 888
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional trailing hexseq: lookahead must be a hex-digit token
            # (bitmask shifted down by 20, the lowest token in the set).
            if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
                self.state = 887
                self.hexseq()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 890
            self.match(sdpParser.COLON)
            self.state = 891
            self.match(sdpParser.COLON)
            self.state = 894
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Same optional trailing hexseq as alternative 2.
            if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
                self.state = 893
                self.hexseq()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HexseqContext(ParserRuleContext):
    """Parse-tree node produced by the ``hexseq`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hex4(self, i: int = None):
        # All hex4 children when i is omitted, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Hex4Context)
        return self.getTypedRuleContext(sdpParser.Hex4Context, i)

    def COLON(self, i: int = None):
        return self.getTokens(sdpParser.COLON) if i is None else self.getToken(sdpParser.COLON, i)

    def getRuleIndex(self):
        return sdpParser.RULE_hexseq

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterHexseq
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitHexseq
        except AttributeError:
            return
        hook(self)
def hexseq(self):
    """ANTLR-generated rule method: hexseq — hex4 (':' hex4)*.

    ATN state numbers and decision indices are generated — do not
    edit by hand.
    """
    localctx = sdpParser.HexseqContext(self, self._ctx, self.state)
    self.enterRule(localctx, 120, self.RULE_hexseq)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 898
        self.hex4()
        self.state = 903
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,68,self._ctx)
        # Star loop: keep consuming ':' hex4 while prediction picks alt 1.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 899
                self.match(sdpParser.COLON)
                self.state = 900
                self.hex4()
            self.state = 905
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,68,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Hex4Context(ParserRuleContext):
    """Parse-tree node produced by the ``hex4`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hexdig(self, i: int = None):
        # All hexdig children when i is omitted, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.HexdigContext)
        return self.getTypedRuleContext(sdpParser.HexdigContext, i)

    def getRuleIndex(self):
        return sdpParser.RULE_hex4

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterHex4
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitHex4
        except AttributeError:
            return
        hook(self)
def hex4(self):
    """ANTLR-generated rule method: hex4.

    One hexdig followed by (via prediction) three more, two more, or an
    optional single extra hexdig — i.e. one to four hex digits.  ATN
    state numbers and decision indices are generated — do not edit by
    hand.
    """
    localctx = sdpParser.Hex4Context(self, self._ctx, self.state)
    self.enterRule(localctx, 122, self.RULE_hex4)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 906
        self.hexdig()
        self.state = 917
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,70,self._ctx)
        if la_ == 1:
            self.state = 907
            self.hexdig()
            self.state = 908
            self.hexdig()
            self.state = 909
            self.hexdig()
            pass
        elif la_ == 2:
            self.state = 911
            self.hexdig()
            self.state = 912
            self.hexdig()
            pass
        elif la_ == 3:
            self.state = 915
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional single extra hexdig (bitmask tests hex-digit tokens,
            # shifted down by 20, the lowest token in the set).
            if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
                self.state = 914
                self.hexdig()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Extn_addrContext(ParserRuleContext):
    """Parse-tree node produced by the ``extn_addr`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def non_ws_string(self):
        return self.getTypedRuleContext(sdpParser.Non_ws_stringContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_extn_addr

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterExtn_addr
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitExtn_addr
        except AttributeError:
            return
        hook(self)
def extn_addr(self):
    """ANTLR-generated rule method: extn_addr — a single non_ws_string."""
    localctx = sdpParser.Extn_addrContext(self, self._ctx, self.state)
    self.enterRule(localctx, 124, self.RULE_extn_addr)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 919
        self.non_ws_string()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TextContext(ParserRuleContext):
    """Parse-tree node produced by the ``text`` grammar rule."""

    def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def byte_string(self):
        return self.getTypedRuleContext(sdpParser.Byte_stringContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_text

    def enterRule(self, listener: ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        try:
            hook = listener.enterText
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener: ParseTreeListener):
        try:
            hook = listener.exitText
        except AttributeError:
            return
        hook(self)
def text(self):
    """ANTLR-generated rule method: text — a single byte_string."""
    localctx = sdpParser.TextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 126, self.RULE_text)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 921
        self.byte_string()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Byte_stringContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def U_0001(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0001)
else:
return self.getToken(sdpParser.U_0001, i)
def U_0002(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0002)
else:
return self.getToken(sdpParser.U_0002, i)
def U_0003(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0003)
else:
return self.getToken(sdpParser.U_0003, i)
def U_0004(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0004)
else:
return self.getToken(sdpParser.U_0004, i)
def U_0005(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0005)
else:
return self.getToken(sdpParser.U_0005, i)
def U_0006(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0006)
else:
return self.getToken(sdpParser.U_0006, i)
def U_0007(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0007)
else:
return self.getToken(sdpParser.U_0007, i)
def U_0008(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0008)
else:
return self.getToken(sdpParser.U_0008, i)
def TAB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.TAB)
else:
return self.getToken(sdpParser.TAB, i)
def U_000B(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_000B)
else:
return self.getToken(sdpParser.U_000B, i)
def U_000C(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_000C)
else:
return self.getToken(sdpParser.U_000C, i)
def U_000E(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_000E)
else:
return self.getToken(sdpParser.U_000E, i)
def U_000F(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_000F)
else:
return self.getToken(sdpParser.U_000F, i)
def U_0010(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0010)
else:
return self.getToken(sdpParser.U_0010, i)
def U_0011(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0011)
else:
return self.getToken(sdpParser.U_0011, i)
def U_0012(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0012)
else:
return self.getToken(sdpParser.U_0012, i)
def U_0013(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0013)
else:
return self.getToken(sdpParser.U_0013, i)
def U_0014(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0014)
else:
return self.getToken(sdpParser.U_0014, i)
def U_0015(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0015)
else:
return self.getToken(sdpParser.U_0015, i)
def U_0016(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0016)
else:
return self.getToken(sdpParser.U_0016, i)
def U_0017(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0017)
else:
return self.getToken(sdpParser.U_0017, i)
def U_0018(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0018)
else:
return self.getToken(sdpParser.U_0018, i)
def U_0019(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0019)
else:
return self.getToken(sdpParser.U_0019, i)
def U_001A(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001A)
else:
return self.getToken(sdpParser.U_001A, i)
def U_001B(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001B)
else:
return self.getToken(sdpParser.U_001B, i)
def U_001C(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001C)
else:
return self.getToken(sdpParser.U_001C, i)
def U_001D(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001D)
else:
return self.getToken(sdpParser.U_001D, i)
def U_001E(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001E)
else:
return self.getToken(sdpParser.U_001E, i)
def U_001F(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_001F)
else:
return self.getToken(sdpParser.U_001F, i)
def SPACE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.SPACE)
else:
return self.getToken(sdpParser.SPACE, i)
def EXCLAMATION(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.EXCLAMATION)
else:
return self.getToken(sdpParser.EXCLAMATION, i)
def QUOTE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.QUOTE)
else:
return self.getToken(sdpParser.QUOTE, i)
def HASH(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.HASH)
else:
return self.getToken(sdpParser.HASH, i)
def DOLLAR(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.DOLLAR)
else:
return self.getToken(sdpParser.DOLLAR, i)
def PERCENT(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.PERCENT)
else:
return self.getToken(sdpParser.PERCENT, i)
# ANTLR-generated terminal accessors for the byte_string context.
# Each accessor returns the list of all matching terminal nodes when i is
# None, otherwise the i-th matching terminal node (delegates to the
# antlr4 ParserRuleContext getTokens/getToken helpers).
def AMPERSAND(self, i: int = None): return self.getTokens(sdpParser.AMPERSAND) if i is None else self.getToken(sdpParser.AMPERSAND, i)
def APOSTROPHE(self, i: int = None): return self.getTokens(sdpParser.APOSTROPHE) if i is None else self.getToken(sdpParser.APOSTROPHE, i)
def LEFT_PAREN(self, i: int = None): return self.getTokens(sdpParser.LEFT_PAREN) if i is None else self.getToken(sdpParser.LEFT_PAREN, i)
def RIGHT_PAREN(self, i: int = None): return self.getTokens(sdpParser.RIGHT_PAREN) if i is None else self.getToken(sdpParser.RIGHT_PAREN, i)
def ASTERISK(self, i: int = None): return self.getTokens(sdpParser.ASTERISK) if i is None else self.getToken(sdpParser.ASTERISK, i)
def PLUS(self, i: int = None): return self.getTokens(sdpParser.PLUS) if i is None else self.getToken(sdpParser.PLUS, i)
def COMMA(self, i: int = None): return self.getTokens(sdpParser.COMMA) if i is None else self.getToken(sdpParser.COMMA, i)
def DASH(self, i: int = None): return self.getTokens(sdpParser.DASH) if i is None else self.getToken(sdpParser.DASH, i)
def PERIOD(self, i: int = None): return self.getTokens(sdpParser.PERIOD) if i is None else self.getToken(sdpParser.PERIOD, i)
def SLASH(self, i: int = None): return self.getTokens(sdpParser.SLASH) if i is None else self.getToken(sdpParser.SLASH, i)
def ZERO(self, i: int = None): return self.getTokens(sdpParser.ZERO) if i is None else self.getToken(sdpParser.ZERO, i)
def ONE(self, i: int = None): return self.getTokens(sdpParser.ONE) if i is None else self.getToken(sdpParser.ONE, i)
def TWO(self, i: int = None): return self.getTokens(sdpParser.TWO) if i is None else self.getToken(sdpParser.TWO, i)
def THREE(self, i: int = None): return self.getTokens(sdpParser.THREE) if i is None else self.getToken(sdpParser.THREE, i)
def FOUR(self, i: int = None): return self.getTokens(sdpParser.FOUR) if i is None else self.getToken(sdpParser.FOUR, i)
def FIVE(self, i: int = None): return self.getTokens(sdpParser.FIVE) if i is None else self.getToken(sdpParser.FIVE, i)
def SIX(self, i: int = None): return self.getTokens(sdpParser.SIX) if i is None else self.getToken(sdpParser.SIX, i)
def SEVEN(self, i: int = None): return self.getTokens(sdpParser.SEVEN) if i is None else self.getToken(sdpParser.SEVEN, i)
def EIGHT(self, i: int = None): return self.getTokens(sdpParser.EIGHT) if i is None else self.getToken(sdpParser.EIGHT, i)
def NINE(self, i: int = None): return self.getTokens(sdpParser.NINE) if i is None else self.getToken(sdpParser.NINE, i)
def COLON(self, i: int = None): return self.getTokens(sdpParser.COLON) if i is None else self.getToken(sdpParser.COLON, i)
def SEMICOLON(self, i: int = None): return self.getTokens(sdpParser.SEMICOLON) if i is None else self.getToken(sdpParser.SEMICOLON, i)
def LESS_THAN(self, i: int = None): return self.getTokens(sdpParser.LESS_THAN) if i is None else self.getToken(sdpParser.LESS_THAN, i)
def EQUALS(self, i: int = None): return self.getTokens(sdpParser.EQUALS) if i is None else self.getToken(sdpParser.EQUALS, i)
def GREATER_THAN(self, i: int = None): return self.getTokens(sdpParser.GREATER_THAN) if i is None else self.getToken(sdpParser.GREATER_THAN, i)
def QUESTION(self, i: int = None): return self.getTokens(sdpParser.QUESTION) if i is None else self.getToken(sdpParser.QUESTION, i)
def AT(self, i: int = None): return self.getTokens(sdpParser.AT) if i is None else self.getToken(sdpParser.AT, i)
# ANTLR-generated terminal accessors for the uppercase-letter tokens.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def CAP_A(self, i: int = None): return self.getTokens(sdpParser.CAP_A) if i is None else self.getToken(sdpParser.CAP_A, i)
def CAP_B(self, i: int = None): return self.getTokens(sdpParser.CAP_B) if i is None else self.getToken(sdpParser.CAP_B, i)
def CAP_C(self, i: int = None): return self.getTokens(sdpParser.CAP_C) if i is None else self.getToken(sdpParser.CAP_C, i)
def CAP_D(self, i: int = None): return self.getTokens(sdpParser.CAP_D) if i is None else self.getToken(sdpParser.CAP_D, i)
def CAP_E(self, i: int = None): return self.getTokens(sdpParser.CAP_E) if i is None else self.getToken(sdpParser.CAP_E, i)
def CAP_F(self, i: int = None): return self.getTokens(sdpParser.CAP_F) if i is None else self.getToken(sdpParser.CAP_F, i)
def CAP_G(self, i: int = None): return self.getTokens(sdpParser.CAP_G) if i is None else self.getToken(sdpParser.CAP_G, i)
def CAP_H(self, i: int = None): return self.getTokens(sdpParser.CAP_H) if i is None else self.getToken(sdpParser.CAP_H, i)
def CAP_I(self, i: int = None): return self.getTokens(sdpParser.CAP_I) if i is None else self.getToken(sdpParser.CAP_I, i)
def CAP_J(self, i: int = None): return self.getTokens(sdpParser.CAP_J) if i is None else self.getToken(sdpParser.CAP_J, i)
def CAP_K(self, i: int = None): return self.getTokens(sdpParser.CAP_K) if i is None else self.getToken(sdpParser.CAP_K, i)
def CAP_L(self, i: int = None): return self.getTokens(sdpParser.CAP_L) if i is None else self.getToken(sdpParser.CAP_L, i)
def CAP_M(self, i: int = None): return self.getTokens(sdpParser.CAP_M) if i is None else self.getToken(sdpParser.CAP_M, i)
def CAP_N(self, i: int = None): return self.getTokens(sdpParser.CAP_N) if i is None else self.getToken(sdpParser.CAP_N, i)
def CAP_O(self, i: int = None): return self.getTokens(sdpParser.CAP_O) if i is None else self.getToken(sdpParser.CAP_O, i)
def CAP_P(self, i: int = None): return self.getTokens(sdpParser.CAP_P) if i is None else self.getToken(sdpParser.CAP_P, i)
def CAP_Q(self, i: int = None): return self.getTokens(sdpParser.CAP_Q) if i is None else self.getToken(sdpParser.CAP_Q, i)
def CAP_R(self, i: int = None): return self.getTokens(sdpParser.CAP_R) if i is None else self.getToken(sdpParser.CAP_R, i)
def CAP_S(self, i: int = None): return self.getTokens(sdpParser.CAP_S) if i is None else self.getToken(sdpParser.CAP_S, i)
def CAP_T(self, i: int = None): return self.getTokens(sdpParser.CAP_T) if i is None else self.getToken(sdpParser.CAP_T, i)
def CAP_U(self, i: int = None): return self.getTokens(sdpParser.CAP_U) if i is None else self.getToken(sdpParser.CAP_U, i)
def CAP_V(self, i: int = None): return self.getTokens(sdpParser.CAP_V) if i is None else self.getToken(sdpParser.CAP_V, i)
def CAP_W(self, i: int = None): return self.getTokens(sdpParser.CAP_W) if i is None else self.getToken(sdpParser.CAP_W, i)
def CAP_X(self, i: int = None): return self.getTokens(sdpParser.CAP_X) if i is None else self.getToken(sdpParser.CAP_X, i)
def CAP_Y(self, i: int = None): return self.getTokens(sdpParser.CAP_Y) if i is None else self.getToken(sdpParser.CAP_Y, i)
def CAP_Z(self, i: int = None): return self.getTokens(sdpParser.CAP_Z) if i is None else self.getToken(sdpParser.CAP_Z, i)
# ANTLR-generated terminal accessors for bracket/underscore/accent tokens.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def LEFT_BRACE(self, i: int = None): return self.getTokens(sdpParser.LEFT_BRACE) if i is None else self.getToken(sdpParser.LEFT_BRACE, i)
def BACKSLASH(self, i: int = None): return self.getTokens(sdpParser.BACKSLASH) if i is None else self.getToken(sdpParser.BACKSLASH, i)
def RIGHT_BRACE(self, i: int = None): return self.getTokens(sdpParser.RIGHT_BRACE) if i is None else self.getToken(sdpParser.RIGHT_BRACE, i)
def CARAT(self, i: int = None): return self.getTokens(sdpParser.CARAT) if i is None else self.getToken(sdpParser.CARAT, i)
def UNDERSCORE(self, i: int = None): return self.getTokens(sdpParser.UNDERSCORE) if i is None else self.getToken(sdpParser.UNDERSCORE, i)
def ACCENT(self, i: int = None): return self.getTokens(sdpParser.ACCENT) if i is None else self.getToken(sdpParser.ACCENT, i)
# ANTLR-generated terminal accessors for the lowercase-letter tokens.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def A(self, i: int = None): return self.getTokens(sdpParser.A) if i is None else self.getToken(sdpParser.A, i)
def B(self, i: int = None): return self.getTokens(sdpParser.B) if i is None else self.getToken(sdpParser.B, i)
def C(self, i: int = None): return self.getTokens(sdpParser.C) if i is None else self.getToken(sdpParser.C, i)
def D(self, i: int = None): return self.getTokens(sdpParser.D) if i is None else self.getToken(sdpParser.D, i)
def E(self, i: int = None): return self.getTokens(sdpParser.E) if i is None else self.getToken(sdpParser.E, i)
def F(self, i: int = None): return self.getTokens(sdpParser.F) if i is None else self.getToken(sdpParser.F, i)
def G(self, i: int = None): return self.getTokens(sdpParser.G) if i is None else self.getToken(sdpParser.G, i)
def H(self, i: int = None): return self.getTokens(sdpParser.H) if i is None else self.getToken(sdpParser.H, i)
def I(self, i: int = None): return self.getTokens(sdpParser.I) if i is None else self.getToken(sdpParser.I, i)
def J(self, i: int = None): return self.getTokens(sdpParser.J) if i is None else self.getToken(sdpParser.J, i)
def K(self, i: int = None): return self.getTokens(sdpParser.K) if i is None else self.getToken(sdpParser.K, i)
def L(self, i: int = None): return self.getTokens(sdpParser.L) if i is None else self.getToken(sdpParser.L, i)
def M(self, i: int = None): return self.getTokens(sdpParser.M) if i is None else self.getToken(sdpParser.M, i)
def N(self, i: int = None): return self.getTokens(sdpParser.N) if i is None else self.getToken(sdpParser.N, i)
def O(self, i: int = None): return self.getTokens(sdpParser.O) if i is None else self.getToken(sdpParser.O, i)
def P(self, i: int = None): return self.getTokens(sdpParser.P) if i is None else self.getToken(sdpParser.P, i)
def Q(self, i: int = None): return self.getTokens(sdpParser.Q) if i is None else self.getToken(sdpParser.Q, i)
def R(self, i: int = None): return self.getTokens(sdpParser.R) if i is None else self.getToken(sdpParser.R, i)
def S(self, i: int = None): return self.getTokens(sdpParser.S) if i is None else self.getToken(sdpParser.S, i)
def T(self, i: int = None): return self.getTokens(sdpParser.T) if i is None else self.getToken(sdpParser.T, i)
def U(self, i: int = None): return self.getTokens(sdpParser.U) if i is None else self.getToken(sdpParser.U, i)
def V(self, i: int = None): return self.getTokens(sdpParser.V) if i is None else self.getToken(sdpParser.V, i)
def W(self, i: int = None): return self.getTokens(sdpParser.W) if i is None else self.getToken(sdpParser.W, i)
def X(self, i: int = None): return self.getTokens(sdpParser.X) if i is None else self.getToken(sdpParser.X, i)
def Y(self, i: int = None): return self.getTokens(sdpParser.Y) if i is None else self.getToken(sdpParser.Y, i)
def Z(self, i: int = None): return self.getTokens(sdpParser.Z) if i is None else self.getToken(sdpParser.Z, i)
# ANTLR-generated terminal accessors for curly-brace/pipe/tilde/DEL tokens.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def LEFT_CURLY_BRACE(self, i: int = None): return self.getTokens(sdpParser.LEFT_CURLY_BRACE) if i is None else self.getToken(sdpParser.LEFT_CURLY_BRACE, i)
def PIPE(self, i: int = None): return self.getTokens(sdpParser.PIPE) if i is None else self.getToken(sdpParser.PIPE, i)
def RIGHT_CURLY_BRACE(self, i: int = None): return self.getTokens(sdpParser.RIGHT_CURLY_BRACE) if i is None else self.getToken(sdpParser.RIGHT_CURLY_BRACE, i)
def TILDE(self, i: int = None): return self.getTokens(sdpParser.TILDE) if i is None else self.getToken(sdpParser.TILDE, i)
def U_007F(self, i: int = None): return self.getTokens(sdpParser.U_007F) if i is None else self.getToken(sdpParser.U_007F, i)
# ANTLR-generated terminal accessors for the high-byte tokens U+0080..U+00BF.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def U_0080(self, i: int = None): return self.getTokens(sdpParser.U_0080) if i is None else self.getToken(sdpParser.U_0080, i)
def U_0081(self, i: int = None): return self.getTokens(sdpParser.U_0081) if i is None else self.getToken(sdpParser.U_0081, i)
def U_0082(self, i: int = None): return self.getTokens(sdpParser.U_0082) if i is None else self.getToken(sdpParser.U_0082, i)
def U_0083(self, i: int = None): return self.getTokens(sdpParser.U_0083) if i is None else self.getToken(sdpParser.U_0083, i)
def U_0084(self, i: int = None): return self.getTokens(sdpParser.U_0084) if i is None else self.getToken(sdpParser.U_0084, i)
def U_0085(self, i: int = None): return self.getTokens(sdpParser.U_0085) if i is None else self.getToken(sdpParser.U_0085, i)
def U_0086(self, i: int = None): return self.getTokens(sdpParser.U_0086) if i is None else self.getToken(sdpParser.U_0086, i)
def U_0087(self, i: int = None): return self.getTokens(sdpParser.U_0087) if i is None else self.getToken(sdpParser.U_0087, i)
def U_0088(self, i: int = None): return self.getTokens(sdpParser.U_0088) if i is None else self.getToken(sdpParser.U_0088, i)
def U_0089(self, i: int = None): return self.getTokens(sdpParser.U_0089) if i is None else self.getToken(sdpParser.U_0089, i)
def U_008A(self, i: int = None): return self.getTokens(sdpParser.U_008A) if i is None else self.getToken(sdpParser.U_008A, i)
def U_008B(self, i: int = None): return self.getTokens(sdpParser.U_008B) if i is None else self.getToken(sdpParser.U_008B, i)
def U_008C(self, i: int = None): return self.getTokens(sdpParser.U_008C) if i is None else self.getToken(sdpParser.U_008C, i)
def U_008D(self, i: int = None): return self.getTokens(sdpParser.U_008D) if i is None else self.getToken(sdpParser.U_008D, i)
def U_008E(self, i: int = None): return self.getTokens(sdpParser.U_008E) if i is None else self.getToken(sdpParser.U_008E, i)
def U_008F(self, i: int = None): return self.getTokens(sdpParser.U_008F) if i is None else self.getToken(sdpParser.U_008F, i)
def U_0090(self, i: int = None): return self.getTokens(sdpParser.U_0090) if i is None else self.getToken(sdpParser.U_0090, i)
def U_0091(self, i: int = None): return self.getTokens(sdpParser.U_0091) if i is None else self.getToken(sdpParser.U_0091, i)
def U_0092(self, i: int = None): return self.getTokens(sdpParser.U_0092) if i is None else self.getToken(sdpParser.U_0092, i)
def U_0093(self, i: int = None): return self.getTokens(sdpParser.U_0093) if i is None else self.getToken(sdpParser.U_0093, i)
def U_0094(self, i: int = None): return self.getTokens(sdpParser.U_0094) if i is None else self.getToken(sdpParser.U_0094, i)
def U_0095(self, i: int = None): return self.getTokens(sdpParser.U_0095) if i is None else self.getToken(sdpParser.U_0095, i)
def U_0096(self, i: int = None): return self.getTokens(sdpParser.U_0096) if i is None else self.getToken(sdpParser.U_0096, i)
def U_0097(self, i: int = None): return self.getTokens(sdpParser.U_0097) if i is None else self.getToken(sdpParser.U_0097, i)
def U_0098(self, i: int = None): return self.getTokens(sdpParser.U_0098) if i is None else self.getToken(sdpParser.U_0098, i)
def U_0099(self, i: int = None): return self.getTokens(sdpParser.U_0099) if i is None else self.getToken(sdpParser.U_0099, i)
def U_009A(self, i: int = None): return self.getTokens(sdpParser.U_009A) if i is None else self.getToken(sdpParser.U_009A, i)
def U_009B(self, i: int = None): return self.getTokens(sdpParser.U_009B) if i is None else self.getToken(sdpParser.U_009B, i)
def U_009C(self, i: int = None): return self.getTokens(sdpParser.U_009C) if i is None else self.getToken(sdpParser.U_009C, i)
def U_009D(self, i: int = None): return self.getTokens(sdpParser.U_009D) if i is None else self.getToken(sdpParser.U_009D, i)
def U_009E(self, i: int = None): return self.getTokens(sdpParser.U_009E) if i is None else self.getToken(sdpParser.U_009E, i)
def U_009F(self, i: int = None): return self.getTokens(sdpParser.U_009F) if i is None else self.getToken(sdpParser.U_009F, i)
def U_00A0(self, i: int = None): return self.getTokens(sdpParser.U_00A0) if i is None else self.getToken(sdpParser.U_00A0, i)
def U_00A1(self, i: int = None): return self.getTokens(sdpParser.U_00A1) if i is None else self.getToken(sdpParser.U_00A1, i)
def U_00A2(self, i: int = None): return self.getTokens(sdpParser.U_00A2) if i is None else self.getToken(sdpParser.U_00A2, i)
def U_00A3(self, i: int = None): return self.getTokens(sdpParser.U_00A3) if i is None else self.getToken(sdpParser.U_00A3, i)
def U_00A4(self, i: int = None): return self.getTokens(sdpParser.U_00A4) if i is None else self.getToken(sdpParser.U_00A4, i)
def U_00A5(self, i: int = None): return self.getTokens(sdpParser.U_00A5) if i is None else self.getToken(sdpParser.U_00A5, i)
def U_00A6(self, i: int = None): return self.getTokens(sdpParser.U_00A6) if i is None else self.getToken(sdpParser.U_00A6, i)
def U_00A7(self, i: int = None): return self.getTokens(sdpParser.U_00A7) if i is None else self.getToken(sdpParser.U_00A7, i)
def U_00A8(self, i: int = None): return self.getTokens(sdpParser.U_00A8) if i is None else self.getToken(sdpParser.U_00A8, i)
def U_00A9(self, i: int = None): return self.getTokens(sdpParser.U_00A9) if i is None else self.getToken(sdpParser.U_00A9, i)
def U_00AA(self, i: int = None): return self.getTokens(sdpParser.U_00AA) if i is None else self.getToken(sdpParser.U_00AA, i)
def U_00AB(self, i: int = None): return self.getTokens(sdpParser.U_00AB) if i is None else self.getToken(sdpParser.U_00AB, i)
def U_00AC(self, i: int = None): return self.getTokens(sdpParser.U_00AC) if i is None else self.getToken(sdpParser.U_00AC, i)
def U_00AD(self, i: int = None): return self.getTokens(sdpParser.U_00AD) if i is None else self.getToken(sdpParser.U_00AD, i)
def U_00AE(self, i: int = None): return self.getTokens(sdpParser.U_00AE) if i is None else self.getToken(sdpParser.U_00AE, i)
def U_00AF(self, i: int = None): return self.getTokens(sdpParser.U_00AF) if i is None else self.getToken(sdpParser.U_00AF, i)
def U_00B0(self, i: int = None): return self.getTokens(sdpParser.U_00B0) if i is None else self.getToken(sdpParser.U_00B0, i)
def U_00B1(self, i: int = None): return self.getTokens(sdpParser.U_00B1) if i is None else self.getToken(sdpParser.U_00B1, i)
def U_00B2(self, i: int = None): return self.getTokens(sdpParser.U_00B2) if i is None else self.getToken(sdpParser.U_00B2, i)
def U_00B3(self, i: int = None): return self.getTokens(sdpParser.U_00B3) if i is None else self.getToken(sdpParser.U_00B3, i)
def U_00B4(self, i: int = None): return self.getTokens(sdpParser.U_00B4) if i is None else self.getToken(sdpParser.U_00B4, i)
def U_00B5(self, i: int = None): return self.getTokens(sdpParser.U_00B5) if i is None else self.getToken(sdpParser.U_00B5, i)
def U_00B6(self, i: int = None): return self.getTokens(sdpParser.U_00B6) if i is None else self.getToken(sdpParser.U_00B6, i)
def U_00B7(self, i: int = None): return self.getTokens(sdpParser.U_00B7) if i is None else self.getToken(sdpParser.U_00B7, i)
def U_00B8(self, i: int = None): return self.getTokens(sdpParser.U_00B8) if i is None else self.getToken(sdpParser.U_00B8, i)
def U_00B9(self, i: int = None): return self.getTokens(sdpParser.U_00B9) if i is None else self.getToken(sdpParser.U_00B9, i)
def U_00BA(self, i: int = None): return self.getTokens(sdpParser.U_00BA) if i is None else self.getToken(sdpParser.U_00BA, i)
def U_00BB(self, i: int = None): return self.getTokens(sdpParser.U_00BB) if i is None else self.getToken(sdpParser.U_00BB, i)
def U_00BC(self, i: int = None): return self.getTokens(sdpParser.U_00BC) if i is None else self.getToken(sdpParser.U_00BC, i)
def U_00BD(self, i: int = None): return self.getTokens(sdpParser.U_00BD) if i is None else self.getToken(sdpParser.U_00BD, i)
def U_00BE(self, i: int = None): return self.getTokens(sdpParser.U_00BE) if i is None else self.getToken(sdpParser.U_00BE, i)
def U_00BF(self, i: int = None): return self.getTokens(sdpParser.U_00BF) if i is None else self.getToken(sdpParser.U_00BF, i)
# ANTLR-generated terminal accessors for the high-byte tokens U+00C0..U+00FF.
# With i omitted, return every matching terminal node in this context;
# with i given, return only the i-th one.
def U_00C0(self, i: int = None): return self.getTokens(sdpParser.U_00C0) if i is None else self.getToken(sdpParser.U_00C0, i)
def U_00C1(self, i: int = None): return self.getTokens(sdpParser.U_00C1) if i is None else self.getToken(sdpParser.U_00C1, i)
def U_00C2(self, i: int = None): return self.getTokens(sdpParser.U_00C2) if i is None else self.getToken(sdpParser.U_00C2, i)
def U_00C3(self, i: int = None): return self.getTokens(sdpParser.U_00C3) if i is None else self.getToken(sdpParser.U_00C3, i)
def U_00C4(self, i: int = None): return self.getTokens(sdpParser.U_00C4) if i is None else self.getToken(sdpParser.U_00C4, i)
def U_00C5(self, i: int = None): return self.getTokens(sdpParser.U_00C5) if i is None else self.getToken(sdpParser.U_00C5, i)
def U_00C6(self, i: int = None): return self.getTokens(sdpParser.U_00C6) if i is None else self.getToken(sdpParser.U_00C6, i)
def U_00C7(self, i: int = None): return self.getTokens(sdpParser.U_00C7) if i is None else self.getToken(sdpParser.U_00C7, i)
def U_00C8(self, i: int = None): return self.getTokens(sdpParser.U_00C8) if i is None else self.getToken(sdpParser.U_00C8, i)
def U_00C9(self, i: int = None): return self.getTokens(sdpParser.U_00C9) if i is None else self.getToken(sdpParser.U_00C9, i)
def U_00CA(self, i: int = None): return self.getTokens(sdpParser.U_00CA) if i is None else self.getToken(sdpParser.U_00CA, i)
def U_00CB(self, i: int = None): return self.getTokens(sdpParser.U_00CB) if i is None else self.getToken(sdpParser.U_00CB, i)
def U_00CC(self, i: int = None): return self.getTokens(sdpParser.U_00CC) if i is None else self.getToken(sdpParser.U_00CC, i)
def U_00CD(self, i: int = None): return self.getTokens(sdpParser.U_00CD) if i is None else self.getToken(sdpParser.U_00CD, i)
def U_00CE(self, i: int = None): return self.getTokens(sdpParser.U_00CE) if i is None else self.getToken(sdpParser.U_00CE, i)
def U_00CF(self, i: int = None): return self.getTokens(sdpParser.U_00CF) if i is None else self.getToken(sdpParser.U_00CF, i)
def U_00D0(self, i: int = None): return self.getTokens(sdpParser.U_00D0) if i is None else self.getToken(sdpParser.U_00D0, i)
def U_00D1(self, i: int = None): return self.getTokens(sdpParser.U_00D1) if i is None else self.getToken(sdpParser.U_00D1, i)
def U_00D2(self, i: int = None): return self.getTokens(sdpParser.U_00D2) if i is None else self.getToken(sdpParser.U_00D2, i)
def U_00D3(self, i: int = None): return self.getTokens(sdpParser.U_00D3) if i is None else self.getToken(sdpParser.U_00D3, i)
def U_00D4(self, i: int = None): return self.getTokens(sdpParser.U_00D4) if i is None else self.getToken(sdpParser.U_00D4, i)
def U_00D5(self, i: int = None): return self.getTokens(sdpParser.U_00D5) if i is None else self.getToken(sdpParser.U_00D5, i)
def U_00D6(self, i: int = None): return self.getTokens(sdpParser.U_00D6) if i is None else self.getToken(sdpParser.U_00D6, i)
def U_00D7(self, i: int = None): return self.getTokens(sdpParser.U_00D7) if i is None else self.getToken(sdpParser.U_00D7, i)
def U_00D8(self, i: int = None): return self.getTokens(sdpParser.U_00D8) if i is None else self.getToken(sdpParser.U_00D8, i)
def U_00D9(self, i: int = None): return self.getTokens(sdpParser.U_00D9) if i is None else self.getToken(sdpParser.U_00D9, i)
def U_00DA(self, i: int = None): return self.getTokens(sdpParser.U_00DA) if i is None else self.getToken(sdpParser.U_00DA, i)
def U_00DB(self, i: int = None): return self.getTokens(sdpParser.U_00DB) if i is None else self.getToken(sdpParser.U_00DB, i)
def U_00DC(self, i: int = None): return self.getTokens(sdpParser.U_00DC) if i is None else self.getToken(sdpParser.U_00DC, i)
def U_00DD(self, i: int = None): return self.getTokens(sdpParser.U_00DD) if i is None else self.getToken(sdpParser.U_00DD, i)
def U_00DE(self, i: int = None): return self.getTokens(sdpParser.U_00DE) if i is None else self.getToken(sdpParser.U_00DE, i)
def U_00DF(self, i: int = None): return self.getTokens(sdpParser.U_00DF) if i is None else self.getToken(sdpParser.U_00DF, i)
def U_00E0(self, i: int = None): return self.getTokens(sdpParser.U_00E0) if i is None else self.getToken(sdpParser.U_00E0, i)
def U_00E1(self, i: int = None): return self.getTokens(sdpParser.U_00E1) if i is None else self.getToken(sdpParser.U_00E1, i)
def U_00E2(self, i: int = None): return self.getTokens(sdpParser.U_00E2) if i is None else self.getToken(sdpParser.U_00E2, i)
def U_00E3(self, i: int = None): return self.getTokens(sdpParser.U_00E3) if i is None else self.getToken(sdpParser.U_00E3, i)
def U_00E4(self, i: int = None): return self.getTokens(sdpParser.U_00E4) if i is None else self.getToken(sdpParser.U_00E4, i)
def U_00E5(self, i: int = None): return self.getTokens(sdpParser.U_00E5) if i is None else self.getToken(sdpParser.U_00E5, i)
def U_00E6(self, i: int = None): return self.getTokens(sdpParser.U_00E6) if i is None else self.getToken(sdpParser.U_00E6, i)
def U_00E7(self, i: int = None): return self.getTokens(sdpParser.U_00E7) if i is None else self.getToken(sdpParser.U_00E7, i)
def U_00E8(self, i: int = None): return self.getTokens(sdpParser.U_00E8) if i is None else self.getToken(sdpParser.U_00E8, i)
def U_00E9(self, i: int = None): return self.getTokens(sdpParser.U_00E9) if i is None else self.getToken(sdpParser.U_00E9, i)
def U_00EA(self, i: int = None): return self.getTokens(sdpParser.U_00EA) if i is None else self.getToken(sdpParser.U_00EA, i)
def U_00EB(self, i: int = None): return self.getTokens(sdpParser.U_00EB) if i is None else self.getToken(sdpParser.U_00EB, i)
def U_00EC(self, i: int = None): return self.getTokens(sdpParser.U_00EC) if i is None else self.getToken(sdpParser.U_00EC, i)
def U_00ED(self, i: int = None): return self.getTokens(sdpParser.U_00ED) if i is None else self.getToken(sdpParser.U_00ED, i)
def U_00EE(self, i: int = None): return self.getTokens(sdpParser.U_00EE) if i is None else self.getToken(sdpParser.U_00EE, i)
def U_00EF(self, i: int = None): return self.getTokens(sdpParser.U_00EF) if i is None else self.getToken(sdpParser.U_00EF, i)
def U_00F0(self, i: int = None): return self.getTokens(sdpParser.U_00F0) if i is None else self.getToken(sdpParser.U_00F0, i)
def U_00F1(self, i: int = None): return self.getTokens(sdpParser.U_00F1) if i is None else self.getToken(sdpParser.U_00F1, i)
def U_00F2(self, i: int = None): return self.getTokens(sdpParser.U_00F2) if i is None else self.getToken(sdpParser.U_00F2, i)
def U_00F3(self, i: int = None): return self.getTokens(sdpParser.U_00F3) if i is None else self.getToken(sdpParser.U_00F3, i)
def U_00F4(self, i: int = None): return self.getTokens(sdpParser.U_00F4) if i is None else self.getToken(sdpParser.U_00F4, i)
def U_00F5(self, i: int = None): return self.getTokens(sdpParser.U_00F5) if i is None else self.getToken(sdpParser.U_00F5, i)
def U_00F6(self, i: int = None): return self.getTokens(sdpParser.U_00F6) if i is None else self.getToken(sdpParser.U_00F6, i)
def U_00F7(self, i: int = None): return self.getTokens(sdpParser.U_00F7) if i is None else self.getToken(sdpParser.U_00F7, i)
def U_00F8(self, i: int = None): return self.getTokens(sdpParser.U_00F8) if i is None else self.getToken(sdpParser.U_00F8, i)
def U_00F9(self, i: int = None): return self.getTokens(sdpParser.U_00F9) if i is None else self.getToken(sdpParser.U_00F9, i)
def U_00FA(self, i: int = None): return self.getTokens(sdpParser.U_00FA) if i is None else self.getToken(sdpParser.U_00FA, i)
def U_00FB(self, i: int = None): return self.getTokens(sdpParser.U_00FB) if i is None else self.getToken(sdpParser.U_00FB, i)
def U_00FC(self, i: int = None): return self.getTokens(sdpParser.U_00FC) if i is None else self.getToken(sdpParser.U_00FC, i)
def U_00FD(self, i: int = None): return self.getTokens(sdpParser.U_00FD) if i is None else self.getToken(sdpParser.U_00FD, i)
def U_00FE(self, i: int = None): return self.getTokens(sdpParser.U_00FE) if i is None else self.getToken(sdpParser.U_00FE, i)
def U_00FF(self, i: int = None): return self.getTokens(sdpParser.U_00FF) if i is None else self.getToken(sdpParser.U_00FF, i)
def getRuleIndex(self):
    # Identify this context object as belonging to the byte_string rule.
    return sdpParser.RULE_byte_string
def enterRule(self, listener: ParseTreeListener):
    # Invoke the listener's enter hook for this rule, but only when the
    # listener actually implements it (hooks are optional).
    handler = getattr(listener, "enterByte_string", None)
    if handler is not None:
        handler(self)
def exitRule(self, listener: ParseTreeListener):
    # Invoke the listener's exit hook for this rule, but only when the
    # listener actually implements it (hooks are optional).
    handler = getattr(listener, "exitByte_string", None)
    if handler is not None:
        handler(self)
def byte_string(self):
localctx = sdpParser.Byte_stringContext(self, self._ctx, self.state)
self.enterRule(localctx, 128, self.RULE_byte_string)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 926
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 926
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [sdpParser.TAB, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008]:
self.state = 923
_la = self._input.LA(1)
if not(_la==sdpParser.TAB or ((((_la - 100)) & ~0x3f) == 0 and ((1 << (_la - 100)) & ((1 << (sdpParser.U_0001 - 100)) | (1 << (sdpParser.U_0002 - 100)) | (1 << (sdpParser.U_0003 - 100)) | (1 << (sdpParser.U_0004 - 100)) | (1 << (sdpParser.U_0005 - 100)) | (1 << (sdpParser.U_0006 - 100)) | (1 << (sdpParser.U_0007 - 100)) | (1 << (sdpParser.U_0008 - 100)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
elif token in [sdpParser.U_000B, sdpParser.U_000C]:
self.state = 924
_la = self._input.LA(1)
if not(_la==sdpParser.U_000B or _la==sdpParser.U_000C):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
elif token in [sdpParser.SPACE, sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.BACKSLASH, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F, sdpParser.U_0080, sdpParser.U_0081, 
sdpParser.U_0082, sdpParser.U_0083, sdpParser.U_0084, sdpParser.U_0085, sdpParser.U_0086, sdpParser.U_0087, sdpParser.U_0088, sdpParser.U_0089, sdpParser.U_008A, sdpParser.U_008B, sdpParser.U_008C, sdpParser.U_008D, sdpParser.U_008E, sdpParser.U_008F, sdpParser.U_0090, sdpParser.U_0091, sdpParser.U_0092, sdpParser.U_0093, sdpParser.U_0094, sdpParser.U_0095, sdpParser.U_0096, sdpParser.U_0097, sdpParser.U_0098, sdpParser.U_0099, sdpParser.U_009A, sdpParser.U_009B, sdpParser.U_009C, sdpParser.U_009D, sdpParser.U_009E, sdpParser.U_009F, sdpParser.U_00A0, sdpParser.U_00A1, sdpParser.U_00A2, sdpParser.U_00A3, sdpParser.U_00A4, sdpParser.U_00A5, sdpParser.U_00A6, sdpParser.U_00A7, sdpParser.U_00A8, sdpParser.U_00A9, sdpParser.U_00AA, sdpParser.U_00AB, sdpParser.U_00AC, sdpParser.U_00AD, sdpParser.U_00AE, sdpParser.U_00AF, sdpParser.U_00B0, sdpParser.U_00B1, sdpParser.U_00B2, sdpParser.U_00B3, sdpParser.U_00B4, sdpParser.U_00B5, sdpParser.U_00B6, sdpParser.U_00B7, sdpParser.U_00B8, sdpParser.U_00B9, sdpParser.U_00BA, sdpParser.U_00BB, sdpParser.U_00BC, sdpParser.U_00BD, sdpParser.U_00BE, sdpParser.U_00BF, sdpParser.U_00C0, sdpParser.U_00C1, sdpParser.U_00C2, sdpParser.U_00C3, sdpParser.U_00C4, sdpParser.U_00C5, sdpParser.U_00C6, sdpParser.U_00C7, sdpParser.U_00C8, sdpParser.U_00C9, sdpParser.U_00CA, sdpParser.U_00CB, sdpParser.U_00CC, sdpParser.U_00CD, sdpParser.U_00CE, sdpParser.U_00CF, sdpParser.U_00D0, sdpParser.U_00D1, sdpParser.U_00D2, sdpParser.U_00D3, sdpParser.U_00D4, sdpParser.U_00D5, sdpParser.U_00D6, sdpParser.U_00D7, sdpParser.U_00D8, sdpParser.U_00D9, sdpParser.U_00DA, sdpParser.U_00DB, sdpParser.U_00DC, sdpParser.U_00DD, sdpParser.U_00DE, sdpParser.U_00DF, sdpParser.U_00E0, sdpParser.U_00E1, sdpParser.U_00E2, sdpParser.U_00E3, sdpParser.U_00E4, sdpParser.U_00E5, sdpParser.U_00E6, sdpParser.U_00E7, sdpParser.U_00E8, sdpParser.U_00E9, sdpParser.U_00EA, sdpParser.U_00EB, sdpParser.U_00EC, sdpParser.U_00ED, sdpParser.U_00EE, sdpParser.U_00EF, sdpParser.U_00F0, 
sdpParser.U_00F1, sdpParser.U_00F2, sdpParser.U_00F3, sdpParser.U_00F4, sdpParser.U_00F5, sdpParser.U_00F6, sdpParser.U_00F7, sdpParser.U_00F8, sdpParser.U_00F9, sdpParser.U_00FA, sdpParser.U_00FB, sdpParser.U_00FC, sdpParser.U_00FD, sdpParser.U_00FE, sdpParser.U_00FF]:
self.state = 925
_la = self._input.LA(1)
if not(((((_la - 4)) & ~0x3f) == 0 and ((1 << (_la - 4)) & ((1 << (sdpParser.SPACE - 4)) | (1 << (sdpParser.EXCLAMATION - 4)) | (1 << (sdpParser.QUOTE - 4)) | (1 << (sdpParser.HASH - 4)) | (1 << (sdpParser.DOLLAR - 4)) | (1 << (sdpParser.PERCENT - 4)) | (1 << (sdpParser.AMPERSAND - 4)) | (1 << (sdpParser.APOSTROPHE - 4)) | (1 << (sdpParser.LEFT_PAREN - 4)) | (1 << (sdpParser.RIGHT_PAREN - 4)) | (1 << (sdpParser.ASTERISK - 4)) | (1 << (sdpParser.PLUS - 4)) | (1 << (sdpParser.COMMA - 4)) | (1 << (sdpParser.DASH - 4)) | (1 << (sdpParser.PERIOD - 4)) | (1 << (sdpParser.SLASH - 4)) | (1 << (sdpParser.ZERO - 4)) | (1 << (sdpParser.ONE - 4)) | (1 << (sdpParser.TWO - 4)) | (1 << (sdpParser.THREE - 4)) | (1 << (sdpParser.FOUR - 4)) | (1 << (sdpParser.FIVE - 4)) | (1 << (sdpParser.SIX - 4)) | (1 << (sdpParser.SEVEN - 4)) | (1 << (sdpParser.EIGHT - 4)) | (1 << (sdpParser.NINE - 4)) | (1 << (sdpParser.COLON - 4)) | (1 << (sdpParser.SEMICOLON - 4)) | (1 << (sdpParser.LESS_THAN - 4)) | (1 << (sdpParser.EQUALS - 4)) | (1 << (sdpParser.GREATER_THAN - 4)) | (1 << (sdpParser.QUESTION - 4)) | (1 << (sdpParser.AT - 4)) | (1 << (sdpParser.CAP_A - 4)) | (1 << (sdpParser.CAP_B - 4)) | (1 << (sdpParser.CAP_C - 4)) | (1 << (sdpParser.CAP_D - 4)) | (1 << (sdpParser.CAP_E - 4)) | (1 << (sdpParser.CAP_F - 4)) | (1 << (sdpParser.CAP_G - 4)) | (1 << (sdpParser.CAP_H - 4)) | (1 << (sdpParser.CAP_I - 4)) | (1 << (sdpParser.CAP_J - 4)) | (1 << (sdpParser.CAP_K - 4)) | (1 << (sdpParser.CAP_L - 4)) | (1 << (sdpParser.CAP_M - 4)) | (1 << (sdpParser.CAP_N - 4)) | (1 << (sdpParser.CAP_O - 4)) | (1 << (sdpParser.CAP_P - 4)) | (1 << (sdpParser.CAP_Q - 4)) | (1 << (sdpParser.CAP_R - 4)) | (1 << (sdpParser.CAP_S - 4)) | (1 << (sdpParser.CAP_T - 4)) | (1 << (sdpParser.CAP_U - 4)) | (1 << (sdpParser.CAP_V - 4)) | (1 << (sdpParser.CAP_W - 4)) | (1 << (sdpParser.CAP_X - 4)) | (1 << (sdpParser.CAP_Y - 4)) | (1 << (sdpParser.CAP_Z - 4)) | (1 << (sdpParser.LEFT_BRACE - 4)) | (1 << (sdpParser.BACKSLASH - 4)) | (1 
<< (sdpParser.RIGHT_BRACE - 4)) | (1 << (sdpParser.CARAT - 4)) | (1 << (sdpParser.UNDERSCORE - 4)))) != 0) or ((((_la - 68)) & ~0x3f) == 0 and ((1 << (_la - 68)) & ((1 << (sdpParser.ACCENT - 68)) | (1 << (sdpParser.A - 68)) | (1 << (sdpParser.B - 68)) | (1 << (sdpParser.C - 68)) | (1 << (sdpParser.D - 68)) | (1 << (sdpParser.E - 68)) | (1 << (sdpParser.F - 68)) | (1 << (sdpParser.G - 68)) | (1 << (sdpParser.H - 68)) | (1 << (sdpParser.I - 68)) | (1 << (sdpParser.J - 68)) | (1 << (sdpParser.K - 68)) | (1 << (sdpParser.L - 68)) | (1 << (sdpParser.M - 68)) | (1 << (sdpParser.N - 68)) | (1 << (sdpParser.O - 68)) | (1 << (sdpParser.P - 68)) | (1 << (sdpParser.Q - 68)) | (1 << (sdpParser.R - 68)) | (1 << (sdpParser.S - 68)) | (1 << (sdpParser.T - 68)) | (1 << (sdpParser.U - 68)) | (1 << (sdpParser.V - 68)) | (1 << (sdpParser.W - 68)) | (1 << (sdpParser.X - 68)) | (1 << (sdpParser.Y - 68)) | (1 << (sdpParser.Z - 68)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 68)) | (1 << (sdpParser.PIPE - 68)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 68)) | (1 << (sdpParser.TILDE - 68)) | (1 << (sdpParser.U_000E - 68)) | (1 << (sdpParser.U_000F - 68)) | (1 << (sdpParser.U_0010 - 68)) | (1 << (sdpParser.U_0011 - 68)) | (1 << (sdpParser.U_0012 - 68)) | (1 << (sdpParser.U_0013 - 68)) | (1 << (sdpParser.U_0014 - 68)) | (1 << (sdpParser.U_0015 - 68)) | (1 << (sdpParser.U_0016 - 68)) | (1 << (sdpParser.U_0017 - 68)) | (1 << (sdpParser.U_0018 - 68)) | (1 << (sdpParser.U_0019 - 68)) | (1 << (sdpParser.U_001A - 68)) | (1 << (sdpParser.U_001B - 68)) | (1 << (sdpParser.U_001C - 68)) | (1 << (sdpParser.U_001D - 68)) | (1 << (sdpParser.U_001E - 68)) | (1 << (sdpParser.U_001F - 68)) | (1 << (sdpParser.U_007F - 68)) | (1 << (sdpParser.U_0080 - 68)) | (1 << (sdpParser.U_0081 - 68)) | (1 << (sdpParser.U_0082 - 68)))) != 0) or ((((_la - 132)) & ~0x3f) == 0 and ((1 << (_la - 132)) & ((1 << (sdpParser.U_0083 - 132)) | (1 << (sdpParser.U_0084 - 132)) | (1 << (sdpParser.U_0085 - 132)) | (1 << (sdpParser.U_0086 - 
132)) | (1 << (sdpParser.U_0087 - 132)) | (1 << (sdpParser.U_0088 - 132)) | (1 << (sdpParser.U_0089 - 132)) | (1 << (sdpParser.U_008A - 132)) | (1 << (sdpParser.U_008B - 132)) | (1 << (sdpParser.U_008C - 132)) | (1 << (sdpParser.U_008D - 132)) | (1 << (sdpParser.U_008E - 132)) | (1 << (sdpParser.U_008F - 132)) | (1 << (sdpParser.U_0090 - 132)) | (1 << (sdpParser.U_0091 - 132)) | (1 << (sdpParser.U_0092 - 132)) | (1 << (sdpParser.U_0093 - 132)) | (1 << (sdpParser.U_0094 - 132)) | (1 << (sdpParser.U_0095 - 132)) | (1 << (sdpParser.U_0096 - 132)) | (1 << (sdpParser.U_0097 - 132)) | (1 << (sdpParser.U_0098 - 132)) | (1 << (sdpParser.U_0099 - 132)) | (1 << (sdpParser.U_009A - 132)) | (1 << (sdpParser.U_009B - 132)) | (1 << (sdpParser.U_009C - 132)) | (1 << (sdpParser.U_009D - 132)) | (1 << (sdpParser.U_009E - 132)) | (1 << (sdpParser.U_009F - 132)) | (1 << (sdpParser.U_00A0 - 132)) | (1 << (sdpParser.U_00A1 - 132)) | (1 << (sdpParser.U_00A2 - 132)) | (1 << (sdpParser.U_00A3 - 132)) | (1 << (sdpParser.U_00A4 - 132)) | (1 << (sdpParser.U_00A5 - 132)) | (1 << (sdpParser.U_00A6 - 132)) | (1 << (sdpParser.U_00A7 - 132)) | (1 << (sdpParser.U_00A8 - 132)) | (1 << (sdpParser.U_00A9 - 132)) | (1 << (sdpParser.U_00AA - 132)) | (1 << (sdpParser.U_00AB - 132)) | (1 << (sdpParser.U_00AC - 132)) | (1 << (sdpParser.U_00AD - 132)) | (1 << (sdpParser.U_00AE - 132)) | (1 << (sdpParser.U_00AF - 132)) | (1 << (sdpParser.U_00B0 - 132)) | (1 << (sdpParser.U_00B1 - 132)) | (1 << (sdpParser.U_00B2 - 132)) | (1 << (sdpParser.U_00B3 - 132)) | (1 << (sdpParser.U_00B4 - 132)) | (1 << (sdpParser.U_00B5 - 132)) | (1 << (sdpParser.U_00B6 - 132)) | (1 << (sdpParser.U_00B7 - 132)) | (1 << (sdpParser.U_00B8 - 132)) | (1 << (sdpParser.U_00B9 - 132)) | (1 << (sdpParser.U_00BA - 132)) | (1 << (sdpParser.U_00BB - 132)) | (1 << (sdpParser.U_00BC - 132)) | (1 << (sdpParser.U_00BD - 132)) | (1 << (sdpParser.U_00BE - 132)) | (1 << (sdpParser.U_00BF - 132)) | (1 << (sdpParser.U_00C0 - 132)) | (1 << 
(sdpParser.U_00C1 - 132)) | (1 << (sdpParser.U_00C2 - 132)))) != 0) or ((((_la - 196)) & ~0x3f) == 0 and ((1 << (_la - 196)) & ((1 << (sdpParser.U_00C3 - 196)) | (1 << (sdpParser.U_00C4 - 196)) | (1 << (sdpParser.U_00C5 - 196)) | (1 << (sdpParser.U_00C6 - 196)) | (1 << (sdpParser.U_00C7 - 196)) | (1 << (sdpParser.U_00C8 - 196)) | (1 << (sdpParser.U_00C9 - 196)) | (1 << (sdpParser.U_00CA - 196)) | (1 << (sdpParser.U_00CB - 196)) | (1 << (sdpParser.U_00CC - 196)) | (1 << (sdpParser.U_00CD - 196)) | (1 << (sdpParser.U_00CE - 196)) | (1 << (sdpParser.U_00CF - 196)) | (1 << (sdpParser.U_00D0 - 196)) | (1 << (sdpParser.U_00D1 - 196)) | (1 << (sdpParser.U_00D2 - 196)) | (1 << (sdpParser.U_00D3 - 196)) | (1 << (sdpParser.U_00D4 - 196)) | (1 << (sdpParser.U_00D5 - 196)) | (1 << (sdpParser.U_00D6 - 196)) | (1 << (sdpParser.U_00D7 - 196)) | (1 << (sdpParser.U_00D8 - 196)) | (1 << (sdpParser.U_00D9 - 196)) | (1 << (sdpParser.U_00DA - 196)) | (1 << (sdpParser.U_00DB - 196)) | (1 << (sdpParser.U_00DC - 196)) | (1 << (sdpParser.U_00DD - 196)) | (1 << (sdpParser.U_00DE - 196)) | (1 << (sdpParser.U_00DF - 196)) | (1 << (sdpParser.U_00E0 - 196)) | (1 << (sdpParser.U_00E1 - 196)) | (1 << (sdpParser.U_00E2 - 196)) | (1 << (sdpParser.U_00E3 - 196)) | (1 << (sdpParser.U_00E4 - 196)) | (1 << (sdpParser.U_00E5 - 196)) | (1 << (sdpParser.U_00E6 - 196)) | (1 << (sdpParser.U_00E7 - 196)) | (1 << (sdpParser.U_00E8 - 196)) | (1 << (sdpParser.U_00E9 - 196)) | (1 << (sdpParser.U_00EA - 196)) | (1 << (sdpParser.U_00EB - 196)) | (1 << (sdpParser.U_00EC - 196)) | (1 << (sdpParser.U_00ED - 196)) | (1 << (sdpParser.U_00EE - 196)) | (1 << (sdpParser.U_00EF - 196)) | (1 << (sdpParser.U_00F0 - 196)) | (1 << (sdpParser.U_00F1 - 196)) | (1 << (sdpParser.U_00F2 - 196)) | (1 << (sdpParser.U_00F3 - 196)) | (1 << (sdpParser.U_00F4 - 196)) | (1 << (sdpParser.U_00F5 - 196)) | (1 << (sdpParser.U_00F6 - 196)) | (1 << (sdpParser.U_00F7 - 196)) | (1 << (sdpParser.U_00F8 - 196)) | (1 << (sdpParser.U_00F9 - 196)) | 
(1 << (sdpParser.U_00FA - 196)) | (1 << (sdpParser.U_00FB - 196)) | (1 << (sdpParser.U_00FC - 196)) | (1 << (sdpParser.U_00FD - 196)) | (1 << (sdpParser.U_00FE - 196)) | (1 << (sdpParser.U_00FF - 196)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
else:
raise NoViableAltException(self)
self.state = 928
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.LEFT_PAREN - 1)) | (1 << (sdpParser.RIGHT_PAREN - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.LESS_THAN - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.GREATER_THAN - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << (sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << 
(sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) 
| (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 
129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << 
(sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Non_ws_stringContext(ParserRuleContext):
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        """Parse-tree context for the non_ws_string rule (ANTLR-generated)."""
        super().__init__(parent, invokingState)
        self.parser = parser
def vchar(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(sdpParser.VcharContext)
else:
return self.getTypedRuleContext(sdpParser.VcharContext,i)
def U_0080(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0080)
else:
return self.getToken(sdpParser.U_0080, i)
def U_0081(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0081)
else:
return self.getToken(sdpParser.U_0081, i)
def U_0082(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0082)
else:
return self.getToken(sdpParser.U_0082, i)
def U_0083(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0083)
else:
return self.getToken(sdpParser.U_0083, i)
def U_0084(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0084)
else:
return self.getToken(sdpParser.U_0084, i)
def U_0085(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0085)
else:
return self.getToken(sdpParser.U_0085, i)
def U_0086(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0086)
else:
return self.getToken(sdpParser.U_0086, i)
def U_0087(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0087)
else:
return self.getToken(sdpParser.U_0087, i)
def U_0088(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0088)
else:
return self.getToken(sdpParser.U_0088, i)
def U_0089(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0089)
else:
return self.getToken(sdpParser.U_0089, i)
def U_008A(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008A)
else:
return self.getToken(sdpParser.U_008A, i)
def U_008B(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008B)
else:
return self.getToken(sdpParser.U_008B, i)
def U_008C(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008C)
else:
return self.getToken(sdpParser.U_008C, i)
def U_008D(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008D)
else:
return self.getToken(sdpParser.U_008D, i)
def U_008E(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008E)
else:
return self.getToken(sdpParser.U_008E, i)
def U_008F(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_008F)
else:
return self.getToken(sdpParser.U_008F, i)
def U_0090(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0090)
else:
return self.getToken(sdpParser.U_0090, i)
def U_0091(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0091)
else:
return self.getToken(sdpParser.U_0091, i)
def U_0092(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0092)
else:
return self.getToken(sdpParser.U_0092, i)
def U_0093(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0093)
else:
return self.getToken(sdpParser.U_0093, i)
def U_0094(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0094)
else:
return self.getToken(sdpParser.U_0094, i)
def U_0095(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0095)
else:
return self.getToken(sdpParser.U_0095, i)
def U_0096(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0096)
else:
return self.getToken(sdpParser.U_0096, i)
def U_0097(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0097)
else:
return self.getToken(sdpParser.U_0097, i)
def U_0098(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0098)
else:
return self.getToken(sdpParser.U_0098, i)
def U_0099(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_0099)
else:
return self.getToken(sdpParser.U_0099, i)
def U_009A(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009A)
else:
return self.getToken(sdpParser.U_009A, i)
def U_009B(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009B)
else:
return self.getToken(sdpParser.U_009B, i)
def U_009C(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009C)
else:
return self.getToken(sdpParser.U_009C, i)
def U_009D(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009D)
else:
return self.getToken(sdpParser.U_009D, i)
def U_009E(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009E)
else:
return self.getToken(sdpParser.U_009E, i)
def U_009F(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_009F)
else:
return self.getToken(sdpParser.U_009F, i)
def U_00A0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A0)
else:
return self.getToken(sdpParser.U_00A0, i)
def U_00A1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A1)
else:
return self.getToken(sdpParser.U_00A1, i)
def U_00A2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A2)
else:
return self.getToken(sdpParser.U_00A2, i)
def U_00A3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A3)
else:
return self.getToken(sdpParser.U_00A3, i)
def U_00A4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A4)
else:
return self.getToken(sdpParser.U_00A4, i)
def U_00A5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A5)
else:
return self.getToken(sdpParser.U_00A5, i)
def U_00A6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A6)
else:
return self.getToken(sdpParser.U_00A6, i)
def U_00A7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A7)
else:
return self.getToken(sdpParser.U_00A7, i)
def U_00A8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A8)
else:
return self.getToken(sdpParser.U_00A8, i)
def U_00A9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00A9)
else:
return self.getToken(sdpParser.U_00A9, i)
def U_00AA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AA)
else:
return self.getToken(sdpParser.U_00AA, i)
def U_00AB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AB)
else:
return self.getToken(sdpParser.U_00AB, i)
def U_00AC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AC)
else:
return self.getToken(sdpParser.U_00AC, i)
def U_00AD(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AD)
else:
return self.getToken(sdpParser.U_00AD, i)
def U_00AE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AE)
else:
return self.getToken(sdpParser.U_00AE, i)
def U_00AF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00AF)
else:
return self.getToken(sdpParser.U_00AF, i)
def U_00B0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B0)
else:
return self.getToken(sdpParser.U_00B0, i)
def U_00B1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B1)
else:
return self.getToken(sdpParser.U_00B1, i)
def U_00B2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B2)
else:
return self.getToken(sdpParser.U_00B2, i)
def U_00B3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B3)
else:
return self.getToken(sdpParser.U_00B3, i)
def U_00B4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B4)
else:
return self.getToken(sdpParser.U_00B4, i)
def U_00B5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B5)
else:
return self.getToken(sdpParser.U_00B5, i)
def U_00B6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B6)
else:
return self.getToken(sdpParser.U_00B6, i)
def U_00B7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B7)
else:
return self.getToken(sdpParser.U_00B7, i)
def U_00B8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B8)
else:
return self.getToken(sdpParser.U_00B8, i)
def U_00B9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00B9)
else:
return self.getToken(sdpParser.U_00B9, i)
def U_00BA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BA)
else:
return self.getToken(sdpParser.U_00BA, i)
def U_00BB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BB)
else:
return self.getToken(sdpParser.U_00BB, i)
def U_00BC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BC)
else:
return self.getToken(sdpParser.U_00BC, i)
def U_00BD(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BD)
else:
return self.getToken(sdpParser.U_00BD, i)
def U_00BE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BE)
else:
return self.getToken(sdpParser.U_00BE, i)
def U_00BF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00BF)
else:
return self.getToken(sdpParser.U_00BF, i)
def U_00C0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C0)
else:
return self.getToken(sdpParser.U_00C0, i)
def U_00C1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C1)
else:
return self.getToken(sdpParser.U_00C1, i)
def U_00C2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C2)
else:
return self.getToken(sdpParser.U_00C2, i)
def U_00C3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C3)
else:
return self.getToken(sdpParser.U_00C3, i)
def U_00C4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C4)
else:
return self.getToken(sdpParser.U_00C4, i)
def U_00C5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C5)
else:
return self.getToken(sdpParser.U_00C5, i)
def U_00C6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C6)
else:
return self.getToken(sdpParser.U_00C6, i)
def U_00C7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C7)
else:
return self.getToken(sdpParser.U_00C7, i)
def U_00C8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C8)
else:
return self.getToken(sdpParser.U_00C8, i)
def U_00C9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00C9)
else:
return self.getToken(sdpParser.U_00C9, i)
def U_00CA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CA)
else:
return self.getToken(sdpParser.U_00CA, i)
def U_00CB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CB)
else:
return self.getToken(sdpParser.U_00CB, i)
def U_00CC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CC)
else:
return self.getToken(sdpParser.U_00CC, i)
def U_00CD(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CD)
else:
return self.getToken(sdpParser.U_00CD, i)
def U_00CE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CE)
else:
return self.getToken(sdpParser.U_00CE, i)
def U_00CF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00CF)
else:
return self.getToken(sdpParser.U_00CF, i)
def U_00D0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D0)
else:
return self.getToken(sdpParser.U_00D0, i)
def U_00D1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D1)
else:
return self.getToken(sdpParser.U_00D1, i)
def U_00D2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D2)
else:
return self.getToken(sdpParser.U_00D2, i)
def U_00D3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D3)
else:
return self.getToken(sdpParser.U_00D3, i)
def U_00D4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D4)
else:
return self.getToken(sdpParser.U_00D4, i)
def U_00D5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D5)
else:
return self.getToken(sdpParser.U_00D5, i)
def U_00D6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D6)
else:
return self.getToken(sdpParser.U_00D6, i)
def U_00D7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D7)
else:
return self.getToken(sdpParser.U_00D7, i)
def U_00D8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D8)
else:
return self.getToken(sdpParser.U_00D8, i)
def U_00D9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00D9)
else:
return self.getToken(sdpParser.U_00D9, i)
def U_00DA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DA)
else:
return self.getToken(sdpParser.U_00DA, i)
def U_00DB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DB)
else:
return self.getToken(sdpParser.U_00DB, i)
def U_00DC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DC)
else:
return self.getToken(sdpParser.U_00DC, i)
# --- Generated token accessors (ANTLR4) for high Latin-1 byte tokens ---
# Each U_00XX(i) returns every matching terminal node in this context when
# i is None, or the i-th occurrence otherwise.
# NOTE(review): indentation appears stripped in this extract; generated code
# kept byte-identical — regenerate from the grammar rather than hand-editing.
def U_00DD(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DD)
else:
return self.getToken(sdpParser.U_00DD, i)
def U_00DE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DE)
else:
return self.getToken(sdpParser.U_00DE, i)
def U_00DF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00DF)
else:
return self.getToken(sdpParser.U_00DF, i)
def U_00E0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E0)
else:
return self.getToken(sdpParser.U_00E0, i)
def U_00E1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E1)
else:
return self.getToken(sdpParser.U_00E1, i)
def U_00E2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E2)
else:
return self.getToken(sdpParser.U_00E2, i)
def U_00E3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E3)
else:
return self.getToken(sdpParser.U_00E3, i)
def U_00E4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E4)
else:
return self.getToken(sdpParser.U_00E4, i)
def U_00E5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E5)
else:
return self.getToken(sdpParser.U_00E5, i)
def U_00E6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E6)
else:
return self.getToken(sdpParser.U_00E6, i)
def U_00E7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E7)
else:
return self.getToken(sdpParser.U_00E7, i)
def U_00E8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E8)
else:
return self.getToken(sdpParser.U_00E8, i)
def U_00E9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00E9)
else:
return self.getToken(sdpParser.U_00E9, i)
def U_00EA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00EA)
else:
return self.getToken(sdpParser.U_00EA, i)
def U_00EB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00EB)
else:
return self.getToken(sdpParser.U_00EB, i)
def U_00EC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00EC)
else:
return self.getToken(sdpParser.U_00EC, i)
def U_00ED(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00ED)
else:
return self.getToken(sdpParser.U_00ED, i)
def U_00EE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00EE)
else:
return self.getToken(sdpParser.U_00EE, i)
def U_00EF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00EF)
else:
return self.getToken(sdpParser.U_00EF, i)
def U_00F0(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F0)
else:
return self.getToken(sdpParser.U_00F0, i)
def U_00F1(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F1)
else:
return self.getToken(sdpParser.U_00F1, i)
def U_00F2(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F2)
else:
return self.getToken(sdpParser.U_00F2, i)
def U_00F3(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F3)
else:
return self.getToken(sdpParser.U_00F3, i)
def U_00F4(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F4)
else:
return self.getToken(sdpParser.U_00F4, i)
def U_00F5(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F5)
else:
return self.getToken(sdpParser.U_00F5, i)
def U_00F6(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F6)
else:
return self.getToken(sdpParser.U_00F6, i)
def U_00F7(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F7)
else:
return self.getToken(sdpParser.U_00F7, i)
def U_00F8(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F8)
else:
return self.getToken(sdpParser.U_00F8, i)
def U_00F9(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00F9)
else:
return self.getToken(sdpParser.U_00F9, i)
def U_00FA(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FA)
else:
return self.getToken(sdpParser.U_00FA, i)
def U_00FB(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FB)
else:
return self.getToken(sdpParser.U_00FB, i)
def U_00FC(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FC)
else:
return self.getToken(sdpParser.U_00FC, i)
def U_00FD(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FD)
else:
return self.getToken(sdpParser.U_00FD, i)
def U_00FE(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FE)
else:
return self.getToken(sdpParser.U_00FE, i)
def U_00FF(self, i:int=None):
if i is None:
return self.getTokens(sdpParser.U_00FF)
else:
return self.getToken(sdpParser.U_00FF, i)
# Standard ANTLR context plumbing for the non_ws_string rule:
# rule index plus enter/exit listener dispatch (hasattr guard lets
# listeners implement only the callbacks they care about).
def getRuleIndex(self):
return sdpParser.RULE_non_ws_string
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNon_ws_string" ):
listener.enterNon_ws_string(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNon_ws_string" ):
listener.exitNon_ws_string(self)
def non_ws_string(self):
# Generated parse method for: non_ws_string : ( vchar | U_0080..U_00FF )+ ;
# ATN state numbers and the precomputed 64-bit membership masks below come
# from the grammar — do not hand-edit; regenerate with ANTLR instead.
localctx = sdpParser.Non_ws_stringContext(self, self._ctx, self.state)
self.enterRule(localctx, 130, self.RULE_non_ws_string)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 932
self._errHandler.sync(self)
_la = self._input.LA(1)
# One-or-more loop: each iteration consumes either a visible-ASCII vchar
# sub-rule or a single high-byte (U_0080..U_00FF) token.
while True:
self.state = 932
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.BACKSLASH, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
self.state = 930
self.vchar()
pass
elif token in [sdpParser.U_0080, sdpParser.U_0081, sdpParser.U_0082, sdpParser.U_0083, sdpParser.U_0084, sdpParser.U_0085, sdpParser.U_0086, sdpParser.U_0087, sdpParser.U_0088, sdpParser.U_0089, sdpParser.U_008A, sdpParser.U_008B, sdpParser.U_008C, sdpParser.U_008D, sdpParser.U_008E, sdpParser.U_008F, sdpParser.U_0090, sdpParser.U_0091, sdpParser.U_0092, sdpParser.U_0093, sdpParser.U_0094, sdpParser.U_0095, sdpParser.U_0096, sdpParser.U_0097, sdpParser.U_0098, sdpParser.U_0099, sdpParser.U_009A, sdpParser.U_009B, sdpParser.U_009C, sdpParser.U_009D, sdpParser.U_009E, sdpParser.U_009F, sdpParser.U_00A0, sdpParser.U_00A1, sdpParser.U_00A2, sdpParser.U_00A3, sdpParser.U_00A4, sdpParser.U_00A5, sdpParser.U_00A6, sdpParser.U_00A7, sdpParser.U_00A8, sdpParser.U_00A9, sdpParser.U_00AA, sdpParser.U_00AB, sdpParser.U_00AC, sdpParser.U_00AD, sdpParser.U_00AE, sdpParser.U_00AF, sdpParser.U_00B0, sdpParser.U_00B1, sdpParser.U_00B2, sdpParser.U_00B3, sdpParser.U_00B4, sdpParser.U_00B5, sdpParser.U_00B6, sdpParser.U_00B7, sdpParser.U_00B8, sdpParser.U_00B9, sdpParser.U_00BA, sdpParser.U_00BB, sdpParser.U_00BC, sdpParser.U_00BD, sdpParser.U_00BE, sdpParser.U_00BF, sdpParser.U_00C0, sdpParser.U_00C1, sdpParser.U_00C2, sdpParser.U_00C3, sdpParser.U_00C4, sdpParser.U_00C5, sdpParser.U_00C6, sdpParser.U_00C7, sdpParser.U_00C8, sdpParser.U_00C9, sdpParser.U_00CA, sdpParser.U_00CB, sdpParser.U_00CC, sdpParser.U_00CD, sdpParser.U_00CE, sdpParser.U_00CF, sdpParser.U_00D0, sdpParser.U_00D1, sdpParser.U_00D2, sdpParser.U_00D3, sdpParser.U_00D4, sdpParser.U_00D5, sdpParser.U_00D6, sdpParser.U_00D7, sdpParser.U_00D8, sdpParser.U_00D9, sdpParser.U_00DA, sdpParser.U_00DB, sdpParser.U_00DC, sdpParser.U_00DD, sdpParser.U_00DE, sdpParser.U_00DF, sdpParser.U_00E0, sdpParser.U_00E1, sdpParser.U_00E2, sdpParser.U_00E3, sdpParser.U_00E4, sdpParser.U_00E5, sdpParser.U_00E6, sdpParser.U_00E7, sdpParser.U_00E8, sdpParser.U_00E9, sdpParser.U_00EA, sdpParser.U_00EB, sdpParser.U_00EC, sdpParser.U_00ED, 
sdpParser.U_00EE, sdpParser.U_00EF, sdpParser.U_00F0, sdpParser.U_00F1, sdpParser.U_00F2, sdpParser.U_00F3, sdpParser.U_00F4, sdpParser.U_00F5, sdpParser.U_00F6, sdpParser.U_00F7, sdpParser.U_00F8, sdpParser.U_00F9, sdpParser.U_00FA, sdpParser.U_00FB, sdpParser.U_00FC, sdpParser.U_00FD, sdpParser.U_00FE, sdpParser.U_00FF]:
self.state = 931
_la = self._input.LA(1)
# Set-membership test via two 64-bit bitmasks (token types 129..192 and 193..256).
if not(((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) 
| (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 
193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
else:
raise NoViableAltException(self)
self.state = 934
self._errHandler.sync(self)
_la = self._input.LA(1)
# Loop-exit test: stop once lookahead is no longer in the rule's element set.
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.GREATER_THAN) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z) | (1 << sdpParser.LEFT_BRACE))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (sdpParser.BACKSLASH - 64)) | (1 << (sdpParser.RIGHT_BRACE - 64)) | (1 << (sdpParser.CARAT - 64)) | (1 << (sdpParser.UNDERSCORE - 64)) | (1 << (sdpParser.ACCENT - 64)) | (1 << (sdpParser.A - 64)) | (1 << (sdpParser.B - 64)) | (1 << (sdpParser.C - 64)) | (1 << (sdpParser.D - 64)) | (1 << (sdpParser.E - 64)) | (1 << (sdpParser.F - 64)) | (1 << (sdpParser.G - 64)) 
| (1 << (sdpParser.H - 64)) | (1 << (sdpParser.I - 64)) | (1 << (sdpParser.J - 64)) | (1 << (sdpParser.K - 64)) | (1 << (sdpParser.L - 64)) | (1 << (sdpParser.M - 64)) | (1 << (sdpParser.N - 64)) | (1 << (sdpParser.O - 64)) | (1 << (sdpParser.P - 64)) | (1 << (sdpParser.Q - 64)) | (1 << (sdpParser.R - 64)) | (1 << (sdpParser.S - 64)) | (1 << (sdpParser.T - 64)) | (1 << (sdpParser.U - 64)) | (1 << (sdpParser.V - 64)) | (1 << (sdpParser.W - 64)) | (1 << (sdpParser.X - 64)) | (1 << (sdpParser.Y - 64)) | (1 << (sdpParser.Z - 64)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 64)) | (1 << (sdpParser.PIPE - 64)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 64)) | (1 << (sdpParser.TILDE - 64)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 
129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << 
(sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << (sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
break
except RecognitionException as re:
# Standard ANTLR recovery: record the exception on the context and resync.
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Token_charContext(ParserRuleContext):
# Parse-tree context for the token_char rule (ANTLR-generated).
# Each zero-arg method below returns the first (index 0) terminal node of
# that token type within this context, or None if absent.
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def EXCLAMATION(self):
return self.getToken(sdpParser.EXCLAMATION, 0)
def HASH(self):
return self.getToken(sdpParser.HASH, 0)
def DOLLAR(self):
return self.getToken(sdpParser.DOLLAR, 0)
def PERCENT(self):
return self.getToken(sdpParser.PERCENT, 0)
def AMPERSAND(self):
return self.getToken(sdpParser.AMPERSAND, 0)
def APOSTROPHE(self):
return self.getToken(sdpParser.APOSTROPHE, 0)
def ASTERISK(self):
return self.getToken(sdpParser.ASTERISK, 0)
def PLUS(self):
return self.getToken(sdpParser.PLUS, 0)
def DASH(self):
return self.getToken(sdpParser.DASH, 0)
def PERIOD(self):
return self.getToken(sdpParser.PERIOD, 0)
def ZERO(self):
return self.getToken(sdpParser.ZERO, 0)
def ONE(self):
return self.getToken(sdpParser.ONE, 0)
def TWO(self):
return self.getToken(sdpParser.TWO, 0)
def THREE(self):
return self.getToken(sdpParser.THREE, 0)
def FOUR(self):
return self.getToken(sdpParser.FOUR, 0)
def FIVE(self):
return self.getToken(sdpParser.FIVE, 0)
def SIX(self):
return self.getToken(sdpParser.SIX, 0)
def SEVEN(self):
return self.getToken(sdpParser.SEVEN, 0)
def EIGHT(self):
return self.getToken(sdpParser.EIGHT, 0)
def NINE(self):
return self.getToken(sdpParser.NINE, 0)
def CAP_A(self):
return self.getToken(sdpParser.CAP_A, 0)
def CAP_B(self):
return self.getToken(sdpParser.CAP_B, 0)
def CAP_C(self):
return self.getToken(sdpParser.CAP_C, 0)
def CAP_D(self):
return self.getToken(sdpParser.CAP_D, 0)
def CAP_E(self):
return self.getToken(sdpParser.CAP_E, 0)
def CAP_F(self):
return self.getToken(sdpParser.CAP_F, 0)
def CAP_G(self):
return self.getToken(sdpParser.CAP_G, 0)
def CAP_H(self):
return self.getToken(sdpParser.CAP_H, 0)
def CAP_I(self):
return self.getToken(sdpParser.CAP_I, 0)
def CAP_J(self):
return self.getToken(sdpParser.CAP_J, 0)
def CAP_K(self):
return self.getToken(sdpParser.CAP_K, 0)
def CAP_L(self):
return self.getToken(sdpParser.CAP_L, 0)
def CAP_M(self):
return self.getToken(sdpParser.CAP_M, 0)
def CAP_N(self):
return self.getToken(sdpParser.CAP_N, 0)
def CAP_O(self):
return self.getToken(sdpParser.CAP_O, 0)
def CAP_P(self):
return self.getToken(sdpParser.CAP_P, 0)
def CAP_Q(self):
return self.getToken(sdpParser.CAP_Q, 0)
def CAP_R(self):
return self.getToken(sdpParser.CAP_R, 0)
def CAP_S(self):
return self.getToken(sdpParser.CAP_S, 0)
def CAP_T(self):
return self.getToken(sdpParser.CAP_T, 0)
def CAP_U(self):
return self.getToken(sdpParser.CAP_U, 0)
def CAP_V(self):
return self.getToken(sdpParser.CAP_V, 0)
def CAP_W(self):
return self.getToken(sdpParser.CAP_W, 0)
def CAP_X(self):
return self.getToken(sdpParser.CAP_X, 0)
def CAP_Y(self):
return self.getToken(sdpParser.CAP_Y, 0)
def CAP_Z(self):
return self.getToken(sdpParser.CAP_Z, 0)
def CARAT(self):
return self.getToken(sdpParser.CARAT, 0)
def UNDERSCORE(self):
return self.getToken(sdpParser.UNDERSCORE, 0)
def ACCENT(self):
return self.getToken(sdpParser.ACCENT, 0)
def A(self):
return self.getToken(sdpParser.A, 0)
def B(self):
return self.getToken(sdpParser.B, 0)
def C(self):
return self.getToken(sdpParser.C, 0)
def D(self):
return self.getToken(sdpParser.D, 0)
def E(self):
return self.getToken(sdpParser.E, 0)
def F(self):
return self.getToken(sdpParser.F, 0)
def G(self):
return self.getToken(sdpParser.G, 0)
def H(self):
return self.getToken(sdpParser.H, 0)
def I(self):
return self.getToken(sdpParser.I, 0)
def J(self):
return self.getToken(sdpParser.J, 0)
def K(self):
return self.getToken(sdpParser.K, 0)
def L(self):
return self.getToken(sdpParser.L, 0)
def M(self):
return self.getToken(sdpParser.M, 0)
def N(self):
return self.getToken(sdpParser.N, 0)
def O(self):
return self.getToken(sdpParser.O, 0)
def P(self):
return self.getToken(sdpParser.P, 0)
def Q(self):
return self.getToken(sdpParser.Q, 0)
def R(self):
return self.getToken(sdpParser.R, 0)
def S(self):
return self.getToken(sdpParser.S, 0)
def T(self):
return self.getToken(sdpParser.T, 0)
def U(self):
return self.getToken(sdpParser.U, 0)
def V(self):
return self.getToken(sdpParser.V, 0)
def W(self):
return self.getToken(sdpParser.W, 0)
def X(self):
return self.getToken(sdpParser.X, 0)
def Y(self):
return self.getToken(sdpParser.Y, 0)
def Z(self):
return self.getToken(sdpParser.Z, 0)
def LEFT_CURLY_BRACE(self):
return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
def PIPE(self):
return self.getToken(sdpParser.PIPE, 0)
def RIGHT_CURLY_BRACE(self):
return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
def TILDE(self):
return self.getToken(sdpParser.TILDE, 0)
def getRuleIndex(self):
return sdpParser.RULE_token_char
# Listener dispatch: invoked only if the listener defines the callback.
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterToken_char" ):
listener.enterToken_char(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitToken_char" ):
listener.exitToken_char(self)
def token_char(self):
# Generated parse method for the token_char rule: seven alternatives,
# each matching one character class allowed in an SDP <token>
# (RFC 4566 token = 1*(token-char)). Dispatch is on one token of lookahead.
localctx = sdpParser.Token_charContext(self, self._ctx, self.state)
self.enterRule(localctx, 132, self.RULE_token_char)
self._la = 0 # Token type
try:
self.state = 943
self._errHandler.sync(self)
token = self._input.LA(1)
# Alt 1: '!'
if token in [sdpParser.EXCLAMATION]:
self.enterOuterAlt(localctx, 1)
self.state = 936
self.match(sdpParser.EXCLAMATION)
pass
# Alt 2: '#' '$' '%' '&' '\''
elif token in [sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE]:
self.enterOuterAlt(localctx, 2)
self.state = 937
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
# Alt 3: '*' '+'
elif token in [sdpParser.ASTERISK, sdpParser.PLUS]:
self.enterOuterAlt(localctx, 3)
self.state = 938
_la = self._input.LA(1)
if not(_la==sdpParser.ASTERISK or _la==sdpParser.PLUS):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
# Alt 4: '-' '.'
elif token in [sdpParser.DASH, sdpParser.PERIOD]:
self.enterOuterAlt(localctx, 4)
self.state = 939
_la = self._input.LA(1)
if not(_la==sdpParser.DASH or _la==sdpParser.PERIOD):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
# Alt 5: digits 0-9
elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
self.enterOuterAlt(localctx, 5)
self.state = 940
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
# Alt 6: uppercase A-Z
elif token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z]:
self.enterOuterAlt(localctx, 6)
self.state = 941
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
# Alt 7: '^' '_' '`' lowercase a-z '{' '|' '}' '~'
elif token in [sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
self.enterOuterAlt(localctx, 7)
self.state = 942
_la = self._input.LA(1)
if not(((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << (sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
# Standard ANTLR recovery: record the exception on the context and resync.
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TokenContext(ParserRuleContext):
# Parse-tree context for the token rule (ANTLR-generated): a sequence of
# one or more token_char sub-contexts.
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def token_char(self, i:int=None):
# All token_char children when i is None, else the i-th one.
if i is None:
return self.getTypedRuleContexts(sdpParser.Token_charContext)
else:
return self.getTypedRuleContext(sdpParser.Token_charContext,i)
def getRuleIndex(self):
return sdpParser.RULE_token
# Listener dispatch: invoked only if the listener defines the callback.
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterToken" ):
listener.enterToken(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitToken" ):
listener.exitToken(self)
def token(self):
# Generated parse method for: token : token_char+ ;
# Loops consuming token_char until the lookahead leaves the token-char set
# (tested via the precomputed 64-bit bitmasks below — regenerate, don't edit).
localctx = sdpParser.TokenContext(self, self._ctx, self.state)
self.enterRule(localctx, 134, self.RULE_token)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 946
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 945
self.token_char()
self.state = 948
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << 
(sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0)):
break
except RecognitionException as re:
# Standard ANTLR recovery: record the exception on the context and resync.
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Email_safeContext(ParserRuleContext):
    """Parse-tree context for the generated ``email_safe`` rule.

    The rule matches a single "email-safe" character, so this context
    exposes one zero-argument token accessor per candidate terminal.
    Instead of spelling out ~250 identical methods, the accessors are
    generated in a loop below; each one behaves exactly like the
    hand-expanded form ``return self.getToken(sdpParser.<NAME>, 0)``.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return sdpParser.RULE_email_safe

    def enterRule(self, listener:ParseTreeListener):
        # EAFP equivalent of the generated hasattr() guard.
        try:
            hook = listener.enterEmail_safe
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitEmail_safe
        except AttributeError:
            return
        hook(self)

    # Names of every terminal the email_safe rule can reference, in the
    # order the generated accessors originally appeared.
    _TOKEN_NAMES = (
        ["U_%04X" % code for code in range(0x01, 0x09)]      # U_0001 .. U_0008
        + ["TAB", "U_000B", "U_000C"]
        + ["U_%04X" % code for code in range(0x0E, 0x20)]    # U_000E .. U_001F
        + ["SPACE", "EXCLAMATION", "QUOTE", "HASH", "DOLLAR", "PERCENT",
           "AMPERSAND", "APOSTROPHE", "ASTERISK", "PLUS", "COMMA", "DASH",
           "PERIOD", "SLASH"]
        + ["ZERO", "ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN",
           "EIGHT", "NINE"]
        + ["COLON", "SEMICOLON", "EQUALS", "QUESTION", "AT"]
        + ["CAP_" + letter for letter in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
        + ["LEFT_BRACE", "BACKSLASH", "RIGHT_BRACE", "CARAT", "UNDERSCORE",
           "ACCENT"]
        + list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
        + ["LEFT_CURLY_BRACE", "PIPE", "RIGHT_CURLY_BRACE", "TILDE"]
        + ["U_%04X" % code for code in range(0x7F, 0x100)]   # U_007F .. U_00FF
    )

    def _make_token_accessor(token_name):
        # Build one accessor.  The token type is resolved via getattr at
        # call time, exactly like the literal ``sdpParser.<NAME>`` in the
        # expanded form (sdpParser is not yet bound while this class body
        # executes, so resolution must be deferred in either style).
        def accessor(self):
            return self.getToken(getattr(sdpParser, token_name), 0)
        accessor.__name__ = token_name
        accessor.__qualname__ = "sdpParser.Email_safeContext." + token_name
        return accessor

    # locals() inside a class body is the real class namespace, so these
    # assignments define ordinary methods on the class.
    _namespace = locals()
    for _token_name in _TOKEN_NAMES:
        _namespace[_token_name] = _make_token_accessor(_token_name)
    del _namespace, _token_name, _TOKEN_NAMES, _make_token_accessor
def email_safe(self):
    """[ANTLR generated] Match one ``email_safe`` terminal (rule index 136).

    A six-way alternation over disjoint character classes; the branch is
    chosen from a single token of lookahead and exactly one token is
    consumed.  Returns the populated Email_safeContext; on a recognition
    error the context's ``exception`` field is set and the standard
    ANTLR error-recovery strategy runs.

    NOTE: the numeric states (950-956) and the bitmask constants below
    come from the generated ATN — do not edit them by hand; regenerate
    from the grammar instead.
    """
    localctx = sdpParser.Email_safeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 136, self.RULE_email_safe)
    self._la = 0 # Token type
    try:
        self.state = 956
        self._errHandler.sync(self)
        # One-token lookahead selects the alternative.
        token = self._input.LA(1)
        # Alt 1: TAB or the control characters U+0001..U+0008.
        if token in [sdpParser.TAB, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008]:
            self.enterOuterAlt(localctx, 1)
            self.state = 950
            _la = self._input.LA(1)
            # 64-bit set-membership test over token types (offset 100).
            if not(_la==sdpParser.TAB or ((((_la - 100)) & ~0x3f) == 0 and ((1 << (_la - 100)) & ((1 << (sdpParser.U_0001 - 100)) | (1 << (sdpParser.U_0002 - 100)) | (1 << (sdpParser.U_0003 - 100)) | (1 << (sdpParser.U_0004 - 100)) | (1 << (sdpParser.U_0005 - 100)) | (1 << (sdpParser.U_0006 - 100)) | (1 << (sdpParser.U_0007 - 100)) | (1 << (sdpParser.U_0008 - 100)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 2: vertical tab or form feed.
        elif token in [sdpParser.U_000B, sdpParser.U_000C]:
            self.enterOuterAlt(localctx, 2)
            self.state = 951
            _la = self._input.LA(1)
            if not(_la==sdpParser.U_000B or _la==sdpParser.U_000C):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 3: SPACE..APOSTROPHE plus control characters U+000E..U+001F.
        elif token in [sdpParser.SPACE, sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F]:
            self.enterOuterAlt(localctx, 3)
            self.state = 952
            _la = self._input.LA(1)
            # Two 64-bit masks: one at offset 0, one at offset 110.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.SPACE) | (1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE))) != 0) or ((((_la - 110)) & ~0x3f) == 0 and ((1 << (_la - 110)) & ((1 << (sdpParser.U_000E - 110)) | (1 << (sdpParser.U_000F - 110)) | (1 << (sdpParser.U_0010 - 110)) | (1 << (sdpParser.U_0011 - 110)) | (1 << (sdpParser.U_0012 - 110)) | (1 << (sdpParser.U_0013 - 110)) | (1 << (sdpParser.U_0014 - 110)) | (1 << (sdpParser.U_0015 - 110)) | (1 << (sdpParser.U_0016 - 110)) | (1 << (sdpParser.U_0017 - 110)) | (1 << (sdpParser.U_0018 - 110)) | (1 << (sdpParser.U_0019 - 110)) | (1 << (sdpParser.U_001A - 110)) | (1 << (sdpParser.U_001B - 110)) | (1 << (sdpParser.U_001C - 110)) | (1 << (sdpParser.U_001D - 110)) | (1 << (sdpParser.U_001E - 110)) | (1 << (sdpParser.U_001F - 110)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 4: ASTERISK..SEMICOLON (punctuation and the digits).
        elif token in [sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON]:
            self.enterOuterAlt(localctx, 4)
            self.state = 953
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 5: the single EQUALS token.
        elif token in [sdpParser.EQUALS]:
            self.enterOuterAlt(localctx, 5)
            self.state = 954
            self.match(sdpParser.EQUALS)
            pass
        # Alt 6: QUESTION/AT, letters, remaining punctuation, and U+007F..U+00FF.
        elif token in [sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.BACKSLASH, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE, sdpParser.U_007F, sdpParser.U_0080, sdpParser.U_0081, sdpParser.U_0082, sdpParser.U_0083, sdpParser.U_0084, sdpParser.U_0085, sdpParser.U_0086, sdpParser.U_0087, sdpParser.U_0088, sdpParser.U_0089, sdpParser.U_008A, sdpParser.U_008B, sdpParser.U_008C, sdpParser.U_008D, sdpParser.U_008E, sdpParser.U_008F, sdpParser.U_0090, sdpParser.U_0091, sdpParser.U_0092, sdpParser.U_0093, sdpParser.U_0094, sdpParser.U_0095, sdpParser.U_0096, sdpParser.U_0097, sdpParser.U_0098, sdpParser.U_0099, sdpParser.U_009A, sdpParser.U_009B, sdpParser.U_009C, sdpParser.U_009D, sdpParser.U_009E, sdpParser.U_009F, sdpParser.U_00A0, sdpParser.U_00A1, sdpParser.U_00A2, sdpParser.U_00A3, sdpParser.U_00A4, sdpParser.U_00A5, sdpParser.U_00A6, sdpParser.U_00A7, sdpParser.U_00A8, sdpParser.U_00A9, sdpParser.U_00AA, sdpParser.U_00AB, sdpParser.U_00AC, sdpParser.U_00AD, sdpParser.U_00AE, sdpParser.U_00AF, sdpParser.U_00B0, sdpParser.U_00B1, sdpParser.U_00B2, sdpParser.U_00B3, sdpParser.U_00B4, sdpParser.U_00B5, sdpParser.U_00B6, sdpParser.U_00B7, sdpParser.U_00B8, sdpParser.U_00B9, sdpParser.U_00BA, sdpParser.U_00BB, sdpParser.U_00BC, sdpParser.U_00BD, sdpParser.U_00BE, sdpParser.U_00BF, sdpParser.U_00C0, sdpParser.U_00C1, sdpParser.U_00C2, sdpParser.U_00C3, sdpParser.U_00C4, sdpParser.U_00C5, sdpParser.U_00C6, sdpParser.U_00C7, sdpParser.U_00C8, sdpParser.U_00C9, sdpParser.U_00CA, sdpParser.U_00CB, sdpParser.U_00CC, sdpParser.U_00CD, sdpParser.U_00CE, sdpParser.U_00CF, sdpParser.U_00D0, sdpParser.U_00D1, sdpParser.U_00D2, sdpParser.U_00D3, sdpParser.U_00D4, sdpParser.U_00D5, sdpParser.U_00D6, sdpParser.U_00D7, sdpParser.U_00D8, sdpParser.U_00D9, sdpParser.U_00DA, sdpParser.U_00DB, sdpParser.U_00DC, sdpParser.U_00DD, sdpParser.U_00DE, sdpParser.U_00DF, sdpParser.U_00E0, sdpParser.U_00E1, sdpParser.U_00E2, sdpParser.U_00E3, sdpParser.U_00E4, sdpParser.U_00E5, sdpParser.U_00E6, sdpParser.U_00E7, sdpParser.U_00E8, sdpParser.U_00E9, sdpParser.U_00EA, sdpParser.U_00EB, sdpParser.U_00EC, sdpParser.U_00ED, sdpParser.U_00EE, sdpParser.U_00EF, sdpParser.U_00F0, sdpParser.U_00F1, sdpParser.U_00F2, sdpParser.U_00F3, sdpParser.U_00F4, sdpParser.U_00F5, sdpParser.U_00F6, sdpParser.U_00F7, sdpParser.U_00F8, sdpParser.U_00F9, sdpParser.U_00FA, sdpParser.U_00FB, sdpParser.U_00FC, sdpParser.U_00FD, sdpParser.U_00FE, sdpParser.U_00FF]:
            self.enterOuterAlt(localctx, 6)
            self.state = 955
            _la = self._input.LA(1)
            # Three 64-bit masks (offsets 35, 128, 192) plus U_00FF checked directly.
            if not(((((_la - 35)) & ~0x3f) == 0 and ((1 << (_la - 35)) & ((1 << (sdpParser.QUESTION - 35)) | (1 << (sdpParser.AT - 35)) | (1 << (sdpParser.CAP_A - 35)) | (1 << (sdpParser.CAP_B - 35)) | (1 << (sdpParser.CAP_C - 35)) | (1 << (sdpParser.CAP_D - 35)) | (1 << (sdpParser.CAP_E - 35)) | (1 << (sdpParser.CAP_F - 35)) | (1 << (sdpParser.CAP_G - 35)) | (1 << (sdpParser.CAP_H - 35)) | (1 << (sdpParser.CAP_I - 35)) | (1 << (sdpParser.CAP_J - 35)) | (1 << (sdpParser.CAP_K - 35)) | (1 << (sdpParser.CAP_L - 35)) | (1 << (sdpParser.CAP_M - 35)) | (1 << (sdpParser.CAP_N - 35)) | (1 << (sdpParser.CAP_O - 35)) | (1 << (sdpParser.CAP_P - 35)) | (1 << (sdpParser.CAP_Q - 35)) | (1 << (sdpParser.CAP_R - 35)) | (1 << (sdpParser.CAP_S - 35)) | (1 << (sdpParser.CAP_T - 35)) | (1 << (sdpParser.CAP_U - 35)) | (1 << (sdpParser.CAP_V - 35)) | (1 << (sdpParser.CAP_W - 35)) | (1 << (sdpParser.CAP_X - 35)) | (1 << (sdpParser.CAP_Y - 35)) | (1 << (sdpParser.CAP_Z - 35)) | (1 << (sdpParser.LEFT_BRACE - 35)) | (1 << (sdpParser.BACKSLASH - 35)) | (1 << (sdpParser.RIGHT_BRACE - 35)) | (1 << (sdpParser.CARAT - 35)) | (1 << (sdpParser.UNDERSCORE - 35)) | (1 << (sdpParser.ACCENT - 35)) | (1 << (sdpParser.A - 35)) | (1 << (sdpParser.B - 35)) | (1 << (sdpParser.C - 35)) | (1 << (sdpParser.D - 35)) | (1 << (sdpParser.E - 35)) | (1 << (sdpParser.F - 35)) | (1 << (sdpParser.G - 35)) | (1 << (sdpParser.H - 35)) | (1 << (sdpParser.I - 35)) | (1 << (sdpParser.J - 35)) | (1 << (sdpParser.K - 35)) | (1 << (sdpParser.L - 35)) | (1 << (sdpParser.M - 35)) | (1 << (sdpParser.N - 35)) | (1 << (sdpParser.O - 35)) | (1 << (sdpParser.P - 35)) | (1 << (sdpParser.Q - 35)) | (1 << (sdpParser.R - 35)) | (1 << (sdpParser.S - 35)) | (1 << (sdpParser.T - 35)) | (1 << (sdpParser.U - 35)) | (1 << (sdpParser.V - 35)) | (1 << (sdpParser.W - 35)) | (1 << (sdpParser.X - 35)) | (1 << (sdpParser.Y - 35)) | (1 << (sdpParser.Z - 35)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 35)) | (1 << (sdpParser.PIPE - 35)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 35)) | (1 << (sdpParser.TILDE - 35)))) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & ((1 << (sdpParser.U_007F - 128)) | (1 << (sdpParser.U_0080 - 128)) | (1 << (sdpParser.U_0081 - 128)) | (1 << (sdpParser.U_0082 - 128)) | (1 << (sdpParser.U_0083 - 128)) | (1 << (sdpParser.U_0084 - 128)) | (1 << (sdpParser.U_0085 - 128)) | (1 << (sdpParser.U_0086 - 128)) | (1 << (sdpParser.U_0087 - 128)) | (1 << (sdpParser.U_0088 - 128)) | (1 << (sdpParser.U_0089 - 128)) | (1 << (sdpParser.U_008A - 128)) | (1 << (sdpParser.U_008B - 128)) | (1 << (sdpParser.U_008C - 128)) | (1 << (sdpParser.U_008D - 128)) | (1 << (sdpParser.U_008E - 128)) | (1 << (sdpParser.U_008F - 128)) | (1 << (sdpParser.U_0090 - 128)) | (1 << (sdpParser.U_0091 - 128)) | (1 << (sdpParser.U_0092 - 128)) | (1 << (sdpParser.U_0093 - 128)) | (1 << (sdpParser.U_0094 - 128)) | (1 << (sdpParser.U_0095 - 128)) | (1 << (sdpParser.U_0096 - 128)) | (1 << (sdpParser.U_0097 - 128)) | (1 << (sdpParser.U_0098 - 128)) | (1 << (sdpParser.U_0099 - 128)) | (1 << (sdpParser.U_009A - 128)) | (1 << (sdpParser.U_009B - 128)) | (1 << (sdpParser.U_009C - 128)) | (1 << (sdpParser.U_009D - 128)) | (1 << (sdpParser.U_009E - 128)) | (1 << (sdpParser.U_009F - 128)) | (1 << (sdpParser.U_00A0 - 128)) | (1 << (sdpParser.U_00A1 - 128)) | (1 << (sdpParser.U_00A2 - 128)) | (1 << (sdpParser.U_00A3 - 128)) | (1 << (sdpParser.U_00A4 - 128)) | (1 << (sdpParser.U_00A5 - 128)) | (1 << (sdpParser.U_00A6 - 128)) | (1 << (sdpParser.U_00A7 - 128)) | (1 << (sdpParser.U_00A8 - 128)) | (1 << (sdpParser.U_00A9 - 128)) | (1 << (sdpParser.U_00AA - 128)) | (1 << (sdpParser.U_00AB - 128)) | (1 << (sdpParser.U_00AC - 128)) | (1 << (sdpParser.U_00AD - 128)) | (1 << (sdpParser.U_00AE - 128)) | (1 << (sdpParser.U_00AF - 128)) | (1 << (sdpParser.U_00B0 - 128)) | (1 << (sdpParser.U_00B1 - 128)) | (1 << (sdpParser.U_00B2 - 128)) | (1 << (sdpParser.U_00B3 - 128)) | (1 << (sdpParser.U_00B4 - 128)) | (1 << (sdpParser.U_00B5 - 128)) | (1 << (sdpParser.U_00B6 - 128)) | (1 << (sdpParser.U_00B7 - 128)) | (1 << (sdpParser.U_00B8 - 128)) | (1 << (sdpParser.U_00B9 - 128)) | (1 << (sdpParser.U_00BA - 128)) | (1 << (sdpParser.U_00BB - 128)) | (1 << (sdpParser.U_00BC - 128)) | (1 << (sdpParser.U_00BD - 128)) | (1 << (sdpParser.U_00BE - 128)))) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & ((1 << (sdpParser.U_00BF - 192)) | (1 << (sdpParser.U_00C0 - 192)) | (1 << (sdpParser.U_00C1 - 192)) | (1 << (sdpParser.U_00C2 - 192)) | (1 << (sdpParser.U_00C3 - 192)) | (1 << (sdpParser.U_00C4 - 192)) | (1 << (sdpParser.U_00C5 - 192)) | (1 << (sdpParser.U_00C6 - 192)) | (1 << (sdpParser.U_00C7 - 192)) | (1 << (sdpParser.U_00C8 - 192)) | (1 << (sdpParser.U_00C9 - 192)) | (1 << (sdpParser.U_00CA - 192)) | (1 << (sdpParser.U_00CB - 192)) | (1 << (sdpParser.U_00CC - 192)) | (1 << (sdpParser.U_00CD - 192)) | (1 << (sdpParser.U_00CE - 192)) | (1 << (sdpParser.U_00CF - 192)) | (1 << (sdpParser.U_00D0 - 192)) | (1 << (sdpParser.U_00D1 - 192)) | (1 << (sdpParser.U_00D2 - 192)) | (1 << (sdpParser.U_00D3 - 192)) | (1 << (sdpParser.U_00D4 - 192)) | (1 << (sdpParser.U_00D5 - 192)) | (1 << (sdpParser.U_00D6 - 192)) | (1 << (sdpParser.U_00D7 - 192)) | (1 << (sdpParser.U_00D8 - 192)) | (1 << (sdpParser.U_00D9 - 192)) | (1 << (sdpParser.U_00DA - 192)) | (1 << (sdpParser.U_00DB - 192)) | (1 << (sdpParser.U_00DC - 192)) | (1 << (sdpParser.U_00DD - 192)) | (1 << (sdpParser.U_00DE - 192)) | (1 << (sdpParser.U_00DF - 192)) | (1 << (sdpParser.U_00E0 - 192)) | (1 << (sdpParser.U_00E1 - 192)) | (1 << (sdpParser.U_00E2 - 192)) | (1 << (sdpParser.U_00E3 - 192)) | (1 << (sdpParser.U_00E4 - 192)) | (1 << (sdpParser.U_00E5 - 192)) | (1 << (sdpParser.U_00E6 - 192)) | (1 << (sdpParser.U_00E7 - 192)) | (1 << (sdpParser.U_00E8 - 192)) | (1 << (sdpParser.U_00E9 - 192)) | (1 << (sdpParser.U_00EA - 192)) | (1 << (sdpParser.U_00EB - 192)) | (1 << (sdpParser.U_00EC - 192)) | (1 << (sdpParser.U_00ED - 192)) | (1 << (sdpParser.U_00EE - 192)) | (1 << (sdpParser.U_00EF - 192)) | (1 << (sdpParser.U_00F0 - 192)) | (1 << (sdpParser.U_00F1 - 192)) | (1 << (sdpParser.U_00F2 - 192)) | (1 << (sdpParser.U_00F3 - 192)) | (1 << (sdpParser.U_00F4 - 192)) | (1 << (sdpParser.U_00F5 - 192)) | (1 << (sdpParser.U_00F6 - 192)) | (1 << (sdpParser.U_00F7 - 192)) | (1 << (sdpParser.U_00F8 - 192)) | (1 << (sdpParser.U_00F9 - 192)) | (1 << (sdpParser.U_00FA - 192)) | (1 << (sdpParser.U_00FB - 192)) | (1 << (sdpParser.U_00FC - 192)) | (1 << (sdpParser.U_00FD - 192)) | (1 << (sdpParser.U_00FE - 192)))) != 0) or _la==sdpParser.U_00FF):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IntegerContext(ParserRuleContext):
    """Parse-tree context for the ``integer`` rule: one ``pos_digit``
    child followed by zero or more ``digit`` children."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return sdpParser.RULE_integer

    def pos_digit(self):
        """Return the single Pos_digitContext child."""
        return self.getTypedRuleContext(sdpParser.Pos_digitContext, 0)

    def digit(self, i:int=None):
        """Return every DigitContext child, or only the i-th one."""
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext, i)

    def enterRule(self, listener:ParseTreeListener):
        # EAFP equivalent of the generated hasattr() guard.
        try:
            hook = listener.enterInteger
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitInteger
        except AttributeError:
            return
        hook(self)
def integer(self):
    """[ANTLR generated] Parse an ``integer``: ``pos_digit digit*``.

    Matches a mandatory leading pos_digit, then greedily consumes
    trailing digit children while the lookahead is ZERO..NINE.
    Returns the populated IntegerContext; on a recognition error the
    context's ``exception`` field is set and standard recovery runs.

    NOTE: the numeric states (958-964) come from the generated ATN —
    regenerate from the grammar rather than editing by hand.
    """
    localctx = sdpParser.IntegerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 138, self.RULE_integer)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 958
        # Mandatory leading digit (the pos_digit sub-rule).
        self.pos_digit()
        self.state = 962
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # 64-bit set-membership test: loop while lookahead is ZERO..NINE.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0):
            self.state = 959
            self.digit()
            self.state = 964
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Alpha_numericContext(ParserRuleContext):
    """Parse-tree context for the ``alpha_numeric`` rule: exactly one
    child, either an AlphaContext or a DigitContext."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return sdpParser.RULE_alpha_numeric

    def alpha(self):
        """Return the AlphaContext child, if this alternative matched."""
        return self.getTypedRuleContext(sdpParser.AlphaContext, 0)

    def digit(self):
        """Return the DigitContext child, if this alternative matched."""
        return self.getTypedRuleContext(sdpParser.DigitContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # EAFP equivalent of the generated hasattr() guard.
        try:
            hook = listener.enterAlpha_numeric
        except AttributeError:
            return
        hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitAlpha_numeric
        except AttributeError:
            return
        hook(self)
def alpha_numeric(self):
    """Parse rule ``alpha_numeric``: a single letter or a single digit.

    Dispatches on one token of lookahead: any upper/lower-case letter
    token selects the ``alpha`` alternative, any digit token selects
    ``digit``; anything else raises NoViableAltException.
    """
    localctx = sdpParser.Alpha_numericContext(self, self._ctx, self.state)
    self.enterRule(localctx, 140, self.RULE_alpha_numeric)
    try:
        self.state = 967
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.enterOuterAlt(localctx, 1)
            self.state = 965
            self.alpha()
            pass
        elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 966
            self.digit()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Pos_digitContext(ParserRuleContext):
    """Parse-tree node for rule ``pos_digit`` (a non-zero digit, '1'..'9')."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Token accessors: each returns the matched terminal node for that
    # digit token, or None if a different digit matched.
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)
    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)
    def getRuleIndex(self):
        return sdpParser.RULE_pos_digit
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterPos_digit" ):
            listener.enterPos_digit(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitPos_digit" ):
            listener.exitPos_digit(self)
def pos_digit(self):
    """Parse rule ``pos_digit``: consume exactly one digit token '1'..'9'.

    Uses a token-set match: if the lookahead is not in the ONE..NINE
    bitset, inline error recovery is attempted instead of consuming.
    """
    localctx = sdpParser.Pos_digitContext(self, self._ctx, self.state)
    self.enterRule(localctx, 142, self.RULE_pos_digit)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 969
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Decimal_ucharContext(ParserRuleContext):
    """Parse-tree node for rule ``decimal_uchar`` (a decimal number 0-255)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def digit(self, i:int=None):
        # All digit sub-contexts when i is None, else the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        else:
            return self.getTypedRuleContext(sdpParser.DigitContext,i)
    def pos_digit(self):
        return self.getTypedRuleContext(sdpParser.Pos_digitContext,0)
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self, i:int=None):
        # TWO may occur more than once across alternatives, hence the
        # list-or-indexed accessor shape.
        if i is None:
            return self.getTokens(sdpParser.TWO)
        else:
            return self.getToken(sdpParser.TWO, i)
    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.FIVE)
        else:
            return self.getToken(sdpParser.FIVE, i)
    def getRuleIndex(self):
        return sdpParser.RULE_decimal_uchar
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterDecimal_uchar" ):
            listener.enterDecimal_uchar(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitDecimal_uchar" ):
            listener.exitDecimal_uchar(self)
def decimal_uchar(self):
    """Parse rule ``decimal_uchar``: a decimal byte value, 0-255.

    Five alternatives, chosen by adaptive prediction (decision 81):
      1. a single digit (0-9)
      2. pos_digit digit (10-99)
      3. ONE digit digit+ (100-199; the grammar as generated allows
         extra trailing digits here — presumably regenerated from an
         ABNF where 1*2DIGIT was widened; verify against the grammar)
      4. TWO [0-4] digit (200-249)
      5. TWO FIVE [0-5] (250-255)
    """
    localctx = sdpParser.Decimal_ucharContext(self, self._ctx, self.state)
    self.enterRule(localctx, 144, self.RULE_decimal_uchar)
    self._la = 0 # Token type
    try:
        self.state = 988
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,81,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 971
            self.digit()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 972
            self.pos_digit()
            self.state = 973
            self.digit()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 975
            self.match(sdpParser.ONE)
            self.state = 976
            self.digit()
            self.state = 978
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # One-or-more loop over digit tokens (do-while shape).
            while True:
                self.state = 977
                self.digit()
                self.state = 980
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
                    break
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 982
            self.match(sdpParser.TWO)
            self.state = 983
            _la = self._input.LA(1)
            # Second digit restricted to 0-4 (values 200-249).
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 984
            self.digit()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 985
            self.match(sdpParser.TWO)
            self.state = 986
            self.match(sdpParser.FIVE)
            self.state = 987
            _la = self._input.LA(1)
            # Last digit restricted to 0-5 (values 250-255).
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Addr_specContext(ParserRuleContext):
    """Parse-tree node for rule ``addr_spec`` (local_part AT domain)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def local_part(self):
        return self.getTypedRuleContext(sdpParser.Local_partContext,0)
    def AT(self):
        # The '@' separator terminal.
        return self.getToken(sdpParser.AT, 0)
    def domain(self):
        return self.getTypedRuleContext(sdpParser.DomainContext,0)
    def getRuleIndex(self):
        return sdpParser.RULE_addr_spec
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterAddr_spec" ):
            listener.enterAddr_spec(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitAddr_spec" ):
            listener.exitAddr_spec(self)
def addr_spec(self):
    """Parse rule ``addr_spec``: local_part '@' domain (email address)."""
    localctx = sdpParser.Addr_specContext(self, self._ctx, self.state)
    self.enterRule(localctx, 146, self.RULE_addr_spec)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 990
        self.local_part()
        self.state = 991
        self.match(sdpParser.AT)
        self.state = 992
        self.domain()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Local_partContext(ParserRuleContext):
    """Parse-tree node for rule ``local_part``
    (dot_atom | quoted_string | obs_local_part)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def dot_atom(self):
        return self.getTypedRuleContext(sdpParser.Dot_atomContext,0)
    def quoted_string(self):
        return self.getTypedRuleContext(sdpParser.Quoted_stringContext,0)
    def obs_local_part(self):
        return self.getTypedRuleContext(sdpParser.Obs_local_partContext,0)
    def getRuleIndex(self):
        return sdpParser.RULE_local_part
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterLocal_part" ):
            listener.enterLocal_part(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitLocal_part" ):
            listener.exitLocal_part(self)
def local_part(self):
    """Parse rule ``local_part``: dot_atom | quoted_string | obs_local_part.

    Alternative selection needs full adaptive prediction (decision 82)
    because the three forms are not distinguishable on one token.
    """
    localctx = sdpParser.Local_partContext(self, self._ctx, self.state)
    self.enterRule(localctx, 148, self.RULE_local_part)
    try:
        self.state = 997
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,82,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 994
            self.dot_atom()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 995
            self.quoted_string()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 996
            self.obs_local_part()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DomainContext(ParserRuleContext):
    """Parse-tree node for rule ``domain``
    (dot_atom | domain_literal | obs_domain)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def dot_atom(self):
        return self.getTypedRuleContext(sdpParser.Dot_atomContext,0)
    def domain_literal(self):
        return self.getTypedRuleContext(sdpParser.Domain_literalContext,0)
    def obs_domain(self):
        return self.getTypedRuleContext(sdpParser.Obs_domainContext,0)
    def getRuleIndex(self):
        return sdpParser.RULE_domain
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterDomain" ):
            listener.enterDomain(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitDomain" ):
            listener.exitDomain(self)
def domain(self):
    """Parse rule ``domain``: dot_atom | domain_literal | obs_domain.

    Alternative selection uses adaptive prediction (decision 83).
    """
    localctx = sdpParser.DomainContext(self, self._ctx, self.state)
    self.enterRule(localctx, 150, self.RULE_domain)
    try:
        self.state = 1002
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,83,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 999
            self.dot_atom()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1000
            self.domain_literal()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1001
            self.obs_domain()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Domain_literalContext(ParserRuleContext):
    """Parse-tree node for rule ``domain_literal``
    ([cfws] '[' *([fws] dtext) [fws] ']' [cfws])."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def LEFT_BRACE(self):
        # Opening bracket terminal of the literal.
        return self.getToken(sdpParser.LEFT_BRACE, 0)
    def RIGHT_BRACE(self):
        # Closing bracket terminal of the literal.
        return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def cfws(self, i:int=None):
        # Optional comment/folding-whitespace contexts (before and after).
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)
    def dtext(self, i:int=None):
        # The literal's content characters.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DtextContext)
        else:
            return self.getTypedRuleContext(sdpParser.DtextContext,i)
    def fws(self, i:int=None):
        # Optional folding whitespace between content characters.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.FwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.FwsContext,i)
    def getRuleIndex(self):
        return sdpParser.RULE_domain_literal
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterDomain_literal" ):
            listener.enterDomain_literal(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitDomain_literal" ):
            listener.exitDomain_literal(self)
def domain_literal(self):
    """Parse rule ``domain_literal``:
    [cfws] LEFT_BRACE ([fws] dtext)* [fws] RIGHT_BRACE [cfws].
    """
    localctx = sdpParser.Domain_literalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 152, self.RULE_domain_literal)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1005
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading cfws: predicted on TAB/CR/SPACE/LEFT_PAREN.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1004
            self.cfws()
        self.state = 1007
        self.match(sdpParser.LEFT_BRACE)
        self.state = 1014
        self._errHandler.sync(self)
        # Zero-or-more ([fws] dtext), driven by adaptive prediction
        # (decision 86); _alt==2 or INVALID terminates the loop.
        _alt = self._interp.adaptivePredict(self._input,86,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1009
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional fws before each dtext.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
                    self.state = 1008
                    self.fws()
                self.state = 1011
                self.dtext()
            self.state = 1016
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,86,self._ctx)
        self.state = 1018
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing fws before the closing bracket.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
            self.state = 1017
            self.fws()
        self.state = 1020
        self.match(sdpParser.RIGHT_BRACE)
        self.state = 1022
        self._errHandler.sync(self)
        # Optional trailing cfws, decided adaptively (decision 88).
        la_ = self._interp.adaptivePredict(self._input,88,self._ctx)
        if la_ == 1:
            self.state = 1021
            self.cfws()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DtextContext(ParserRuleContext):
    """Parse-tree node for rule ``dtext``: a single printable character
    permitted inside a domain literal, or an obs_dtext escape.

    Each accessor below returns the matched terminal node for that token
    type (or None when a different character matched this context).
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)
    def QUOTE(self):
        return self.getToken(sdpParser.QUOTE, 0)
    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)
    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)
    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)
    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)
    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)
    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)
    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)
    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)
    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)
    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)
    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)
    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)
    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)
    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)
    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)
    def AT(self):
        return self.getToken(sdpParser.AT, 0)
    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self):
        return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self):
        return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self):
        return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self):
        return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self):
        return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self):
        return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self):
        return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self):
        return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self):
        return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self):
        return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self):
        return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self):
        return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self):
        return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self):
        return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self):
        return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self):
        return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self):
        return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self):
        return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self):
        return self.getToken(sdpParser.CAP_Z, 0)
    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)
    def A(self):
        return self.getToken(sdpParser.A, 0)
    def B(self):
        return self.getToken(sdpParser.B, 0)
    def C(self):
        return self.getToken(sdpParser.C, 0)
    def D(self):
        return self.getToken(sdpParser.D, 0)
    def E(self):
        return self.getToken(sdpParser.E, 0)
    def F(self):
        return self.getToken(sdpParser.F, 0)
    def G(self):
        return self.getToken(sdpParser.G, 0)
    def H(self):
        return self.getToken(sdpParser.H, 0)
    def I(self):
        return self.getToken(sdpParser.I, 0)
    def J(self):
        return self.getToken(sdpParser.J, 0)
    def K(self):
        return self.getToken(sdpParser.K, 0)
    def L(self):
        return self.getToken(sdpParser.L, 0)
    def M(self):
        return self.getToken(sdpParser.M, 0)
    def N(self):
        return self.getToken(sdpParser.N, 0)
    def O(self):
        return self.getToken(sdpParser.O, 0)
    def P(self):
        return self.getToken(sdpParser.P, 0)
    def Q(self):
        return self.getToken(sdpParser.Q, 0)
    def R(self):
        return self.getToken(sdpParser.R, 0)
    def S(self):
        return self.getToken(sdpParser.S, 0)
    def T(self):
        return self.getToken(sdpParser.T, 0)
    def U(self):
        return self.getToken(sdpParser.U, 0)
    def V(self):
        return self.getToken(sdpParser.V, 0)
    def W(self):
        return self.getToken(sdpParser.W, 0)
    def X(self):
        return self.getToken(sdpParser.X, 0)
    def Y(self):
        return self.getToken(sdpParser.Y, 0)
    def Z(self):
        return self.getToken(sdpParser.Z, 0)
    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)
    def obs_dtext(self):
        # Obsolete-syntax alternative (control characters etc.).
        return self.getTypedRuleContext(sdpParser.Obs_dtextContext,0)
    def getRuleIndex(self):
        return sdpParser.RULE_dtext
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterDtext" ):
            listener.enterDtext(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitDtext" ):
            listener.exitDtext(self)
def dtext(self):
    """Parse rule ``dtext``: one character valid inside a domain literal.

    Three alternatives chosen on one token of lookahead:
      1. printable set below CARAT (bitset over token types 0..63),
      2. printable set from CARAT upward (bitset offset by 66),
      3. obs_dtext for obsolete/control characters.
    """
    localctx = sdpParser.DtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 154, self.RULE_dtext)
    self._la = 0 # Token type
    try:
        self.state = 1027
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1024
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.GREATER_THAN) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1025
            _la = self._input.LA(1)
            # Token types >= 64 are tested in a second 64-bit word,
            # rebased at token type 66 (CARAT).
            if not(((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << (sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.BACKSLASH, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1026
            self.obs_dtext()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AtextContext(ParserRuleContext):
    """Parse-tree node for rule ``atext``: one character allowed in an
    (RFC 5322-style) atom — a letter, digit, or permitted symbol."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def alpha(self):
        return self.getTypedRuleContext(sdpParser.AlphaContext,0)
    def digit(self):
        return self.getTypedRuleContext(sdpParser.DigitContext,0)
    # Terminal accessors for each permitted symbol; None if not matched.
    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)
    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)
    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)
    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)
    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)
    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)
    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)
    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)
    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)
    def getRuleIndex(self):
        return sdpParser.RULE_atext
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterAtext" ):
            listener.enterAtext(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitAtext" ):
            listener.exitAtext(self)
def atext(self):
    """Parse rule ``atext``: a single atom character.

    21 alternatives dispatched on one token of lookahead: letters go to
    ``alpha``, digits to ``digit``, and each permitted symbol is matched
    directly. Any other token raises NoViableAltException.
    """
    localctx = sdpParser.AtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 156, self.RULE_atext)
    try:
        self.state = 1050
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1029
            self.alpha()
            pass
        elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1030
            self.digit()
            pass
        elif token in [sdpParser.EXCLAMATION]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1031
            self.match(sdpParser.EXCLAMATION)
            pass
        elif token in [sdpParser.HASH]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1032
            self.match(sdpParser.HASH)
            pass
        elif token in [sdpParser.DOLLAR]:
            self.enterOuterAlt(localctx, 5)
            self.state = 1033
            self.match(sdpParser.DOLLAR)
            pass
        elif token in [sdpParser.PERCENT]:
            self.enterOuterAlt(localctx, 6)
            self.state = 1034
            self.match(sdpParser.PERCENT)
            pass
        elif token in [sdpParser.AMPERSAND]:
            self.enterOuterAlt(localctx, 7)
            self.state = 1035
            self.match(sdpParser.AMPERSAND)
            pass
        elif token in [sdpParser.APOSTROPHE]:
            self.enterOuterAlt(localctx, 8)
            self.state = 1036
            self.match(sdpParser.APOSTROPHE)
            pass
        elif token in [sdpParser.ASTERISK]:
            self.enterOuterAlt(localctx, 9)
            self.state = 1037
            self.match(sdpParser.ASTERISK)
            pass
        elif token in [sdpParser.PLUS]:
            self.enterOuterAlt(localctx, 10)
            self.state = 1038
            self.match(sdpParser.PLUS)
            pass
        elif token in [sdpParser.DASH]:
            self.enterOuterAlt(localctx, 11)
            self.state = 1039
            self.match(sdpParser.DASH)
            pass
        elif token in [sdpParser.SLASH]:
            self.enterOuterAlt(localctx, 12)
            self.state = 1040
            self.match(sdpParser.SLASH)
            pass
        elif token in [sdpParser.EQUALS]:
            self.enterOuterAlt(localctx, 13)
            self.state = 1041
            self.match(sdpParser.EQUALS)
            pass
        elif token in [sdpParser.QUESTION]:
            self.enterOuterAlt(localctx, 14)
            self.state = 1042
            self.match(sdpParser.QUESTION)
            pass
        elif token in [sdpParser.CARAT]:
            self.enterOuterAlt(localctx, 15)
            self.state = 1043
            self.match(sdpParser.CARAT)
            pass
        elif token in [sdpParser.UNDERSCORE]:
            self.enterOuterAlt(localctx, 16)
            self.state = 1044
            self.match(sdpParser.UNDERSCORE)
            pass
        elif token in [sdpParser.ACCENT]:
            self.enterOuterAlt(localctx, 17)
            self.state = 1045
            self.match(sdpParser.ACCENT)
            pass
        elif token in [sdpParser.LEFT_CURLY_BRACE]:
            self.enterOuterAlt(localctx, 18)
            self.state = 1046
            self.match(sdpParser.LEFT_CURLY_BRACE)
            pass
        elif token in [sdpParser.PIPE]:
            self.enterOuterAlt(localctx, 19)
            self.state = 1047
            self.match(sdpParser.PIPE)
            pass
        elif token in [sdpParser.RIGHT_CURLY_BRACE]:
            self.enterOuterAlt(localctx, 20)
            self.state = 1048
            self.match(sdpParser.RIGHT_CURLY_BRACE)
            pass
        elif token in [sdpParser.TILDE]:
            self.enterOuterAlt(localctx, 21)
            self.state = 1049
            self.match(sdpParser.TILDE)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AtomContext(ParserRuleContext):
    """Parse-tree node for rule ``atom`` ([cfws] atext+ [cfws])."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def cfws(self, i:int=None):
        # Optional surrounding comment/folding-whitespace contexts.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)
    def atext(self, i:int=None):
        # The atom's character contexts (one per matched atext).
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AtextContext)
        else:
            return self.getTypedRuleContext(sdpParser.AtextContext,i)
    def getRuleIndex(self):
        return sdpParser.RULE_atom
    def enterRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule entry.
        if hasattr( listener, "enterAtom" ):
            listener.enterAtom(self)
    def exitRule(self, listener:ParseTreeListener):
        # Optional listener callback on rule exit.
        if hasattr( listener, "exitAtom" ):
            listener.exitAtom(self)
def atom(self):
    """Parse rule ``atom``: [cfws] atext+ [cfws].

    The one-or-more atext loop and the trailing cfws are both driven by
    adaptive prediction (decisions 92 and 93) since whitespace could
    belong to a following rule.
    """
    localctx = sdpParser.AtomContext(self, self._ctx, self.state)
    self.enterRule(localctx, 158, self.RULE_atom)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1053
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading cfws on TAB/CR/SPACE/LEFT_PAREN lookahead.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1052
            self.cfws()
        self.state = 1056
        self._errHandler.sync(self)
        # One-or-more atext loop (starts with _alt=1 so the body runs
        # at least once; prediction decides whether to continue).
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 1055
                self.atext()
            else:
                raise NoViableAltException(self)
            self.state = 1058
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,92,self._ctx)
        self.state = 1061
        self._errHandler.sync(self)
        # Optional trailing cfws, decided adaptively (decision 93).
        la_ = self._interp.adaptivePredict(self._input,93,self._ctx)
        if la_ == 1:
            self.state = 1060
            self.cfws()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dot_atom_textContext(ParserRuleContext):
    """Parse-tree node for the `dot_atom_text` rule: atext+ (PERIOD atext+)*."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def atext(self, i:int=None):
        # No index -> every atext child; otherwise the i-th occurrence.
        return (self.getTypedRuleContexts(sdpParser.AtextContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.AtextContext, i))

    def PERIOD(self, i:int=None):
        # No index -> every PERIOD token; otherwise the i-th occurrence.
        return (self.getTokens(sdpParser.PERIOD)
                if i is None
                else self.getToken(sdpParser.PERIOD, i))

    def getRuleIndex(self):
        return sdpParser.RULE_dot_atom_text

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterDot_atom_text", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitDot_atom_text", None)
        if handler is not None:
            handler(self)
def dot_atom_text(self):
    """Parse one `dot_atom_text`: atext+ (PERIOD atext+)* (ANTLR4-generated).

    The huge bitmask conditions below test whether the lookahead token is in
    the atext set (punctuation subset, digits, upper/lower letters, etc.);
    tokens with values >= 66 are tested in a second, offset 64-bit mask.
    """
    localctx = sdpParser.Dot_atom_textContext(self, self._ctx, self.state)
    self.enterRule(localctx, 160, self.RULE_dot_atom_text)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1064
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Leading atext+ : do-while loop, exits when lookahead leaves the atext set.
        while True:
            self.state = 1063
            self.atext()
            self.state = 1066
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.DASH) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.EQUALS) | (1 << sdpParser.QUESTION) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << (sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0)):
                break

        # Zero-or-more ( PERIOD atext+ ) groups.
        self.state = 1076
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==sdpParser.PERIOD:
            self.state = 1068
            self.match(sdpParser.PERIOD)
            self.state = 1070
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Inner atext+ after each dot, same atext-set membership test.
            while True:
                self.state = 1069
                self.atext()
                self.state = 1072
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.DASH) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.EQUALS) | (1 << sdpParser.QUESTION) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << (sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0)):
                    break

            self.state = 1078
            self._errHandler.sync(self)
            _la = self._input.LA(1)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dot_atomContext(ParserRuleContext):
    """Parse-tree node for the `dot_atom` rule: cfws? dot_atom_text cfws?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def dot_atom_text(self):
        # Exactly one dot_atom_text child.
        return self.getTypedRuleContext(sdpParser.Dot_atom_textContext, 0)

    def cfws(self, i:int=None):
        # No index -> every cfws child; otherwise the i-th occurrence.
        return (self.getTypedRuleContexts(sdpParser.CfwsContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.CfwsContext, i))

    def getRuleIndex(self):
        return sdpParser.RULE_dot_atom

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterDot_atom", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitDot_atom", None)
        if handler is not None:
            handler(self)
def dot_atom(self):
    """Parse one `dot_atom`: cfws? dot_atom_text cfws? (ANTLR4-generated)."""
    localctx = sdpParser.Dot_atomContext(self, self._ctx, self.state)
    self.enterRule(localctx, 162, self.RULE_dot_atom)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1080
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading cfws: taken only when lookahead is TAB/CR/SPACE/LEFT_PAREN.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1079
            self.cfws()

        self.state = 1082
        self.dot_atom_text()
        # Optional trailing cfws, resolved via ATN decision 98.
        self.state = 1084
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
        if la_ == 1:
            self.state = 1083
            self.cfws()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SpecialsContext(ParserRuleContext):
    """Parse-tree node for the `specials` rule: one special character token
    (parens, angle/square brackets, colon, semicolon, at, backslash, comma,
    period) or a dquote sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Single-token accessors: each returns the first (only) matching token.
    def LEFT_PAREN(self): return self.getToken(sdpParser.LEFT_PAREN, 0)
    def RIGHT_PAREN(self): return self.getToken(sdpParser.RIGHT_PAREN, 0)
    def LESS_THAN(self): return self.getToken(sdpParser.LESS_THAN, 0)
    def GREATER_THAN(self): return self.getToken(sdpParser.GREATER_THAN, 0)
    def LEFT_BRACE(self): return self.getToken(sdpParser.LEFT_BRACE, 0)
    def RIGHT_BRACE(self): return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def COLON(self): return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self): return self.getToken(sdpParser.SEMICOLON, 0)
    def AT(self): return self.getToken(sdpParser.AT, 0)
    def BACKSLASH(self): return self.getToken(sdpParser.BACKSLASH, 0)
    def COMMA(self): return self.getToken(sdpParser.COMMA, 0)
    def PERIOD(self): return self.getToken(sdpParser.PERIOD, 0)

    def dquote(self):
        # The double-quote alternative is a sub-rule, not a plain token.
        return self.getTypedRuleContext(sdpParser.DquoteContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_specials

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterSpecials", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitSpecials", None)
        if handler is not None:
            handler(self)
def specials(self):
    """Parse one `specials` character (ANTLR4-generated).

    Thirteen alternatives, dispatched on the single lookahead token; the
    first twelve each match one punctuation token, the last delegates to
    the dquote sub-rule.
    """
    localctx = sdpParser.SpecialsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 164, self.RULE_specials)
    try:
        self.state = 1099
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.LEFT_PAREN]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1086
            self.match(sdpParser.LEFT_PAREN)
            pass
        elif token in [sdpParser.RIGHT_PAREN]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1087
            self.match(sdpParser.RIGHT_PAREN)
            pass
        elif token in [sdpParser.LESS_THAN]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1088
            self.match(sdpParser.LESS_THAN)
            pass
        elif token in [sdpParser.GREATER_THAN]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1089
            self.match(sdpParser.GREATER_THAN)
            pass
        elif token in [sdpParser.LEFT_BRACE]:
            self.enterOuterAlt(localctx, 5)
            self.state = 1090
            self.match(sdpParser.LEFT_BRACE)
            pass
        elif token in [sdpParser.RIGHT_BRACE]:
            self.enterOuterAlt(localctx, 6)
            self.state = 1091
            self.match(sdpParser.RIGHT_BRACE)
            pass
        elif token in [sdpParser.COLON]:
            self.enterOuterAlt(localctx, 7)
            self.state = 1092
            self.match(sdpParser.COLON)
            pass
        elif token in [sdpParser.SEMICOLON]:
            self.enterOuterAlt(localctx, 8)
            self.state = 1093
            self.match(sdpParser.SEMICOLON)
            pass
        elif token in [sdpParser.AT]:
            self.enterOuterAlt(localctx, 9)
            self.state = 1094
            self.match(sdpParser.AT)
            pass
        elif token in [sdpParser.BACKSLASH]:
            self.enterOuterAlt(localctx, 10)
            self.state = 1095
            self.match(sdpParser.BACKSLASH)
            pass
        elif token in [sdpParser.COMMA]:
            self.enterOuterAlt(localctx, 11)
            self.state = 1096
            self.match(sdpParser.COMMA)
            pass
        elif token in [sdpParser.PERIOD]:
            self.enterOuterAlt(localctx, 12)
            self.state = 1097
            self.match(sdpParser.PERIOD)
            pass
        elif token in [sdpParser.QUOTE]:
            # Double quote is handled by a sub-rule rather than a match().
            self.enterOuterAlt(localctx, 13)
            self.state = 1098
            self.dquote()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class QtextContext(ParserRuleContext):
    """Parse-tree node for the `qtext` rule (ANTLR4-generated).

    qtext is any character allowed unescaped inside a quoted string; this
    context exposes one accessor per candidate token plus the obs_qtext
    sub-rule for the obsolete control-character alternative.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # --- punctuation and digit token accessors ---
    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)
    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)
    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)
    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)
    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)
    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)
    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)
    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)
    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)
    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)
    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)
    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)
    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)
    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)
    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)
    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)
    def AT(self):
        return self.getToken(sdpParser.AT, 0)
    # --- upper-case letter token accessors ---
    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self):
        return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self):
        return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self):
        return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self):
        return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self):
        return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self):
        return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self):
        return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self):
        return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self):
        return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self):
        return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self):
        return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self):
        return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self):
        return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self):
        return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self):
        return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self):
        return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self):
        return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self):
        return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self):
        return self.getToken(sdpParser.CAP_Z, 0)
    # --- bracket / symbol token accessors ---
    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)
    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)
    # --- lower-case letter token accessors ---
    def A(self):
        return self.getToken(sdpParser.A, 0)
    def B(self):
        return self.getToken(sdpParser.B, 0)
    def C(self):
        return self.getToken(sdpParser.C, 0)
    def D(self):
        return self.getToken(sdpParser.D, 0)
    def E(self):
        return self.getToken(sdpParser.E, 0)
    def F(self):
        return self.getToken(sdpParser.F, 0)
    def G(self):
        return self.getToken(sdpParser.G, 0)
    def H(self):
        return self.getToken(sdpParser.H, 0)
    def I(self):
        return self.getToken(sdpParser.I, 0)
    def J(self):
        return self.getToken(sdpParser.J, 0)
    def K(self):
        return self.getToken(sdpParser.K, 0)
    def L(self):
        return self.getToken(sdpParser.L, 0)
    def M(self):
        return self.getToken(sdpParser.M, 0)
    def N(self):
        return self.getToken(sdpParser.N, 0)
    def O(self):
        return self.getToken(sdpParser.O, 0)
    def P(self):
        return self.getToken(sdpParser.P, 0)
    def Q(self):
        return self.getToken(sdpParser.Q, 0)
    def R(self):
        return self.getToken(sdpParser.R, 0)
    def S(self):
        return self.getToken(sdpParser.S, 0)
    def T(self):
        return self.getToken(sdpParser.T, 0)
    def U(self):
        return self.getToken(sdpParser.U, 0)
    def V(self):
        return self.getToken(sdpParser.V, 0)
    def W(self):
        return self.getToken(sdpParser.W, 0)
    def X(self):
        return self.getToken(sdpParser.X, 0)
    def Y(self):
        return self.getToken(sdpParser.Y, 0)
    def Z(self):
        return self.getToken(sdpParser.Z, 0)
    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)
    def obs_qtext(self):
        # Obsolete control-character alternative (sub-rule, not a token).
        return self.getTypedRuleContext(sdpParser.Obs_qtextContext, 0)
    def getRuleIndex(self):
        return sdpParser.RULE_qtext
    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterQtext" ):
            listener.enterQtext(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitQtext" ):
            listener.exitQtext(self)
def qtext(self):
    """Parse one `qtext` character (ANTLR4-generated).

    Four alternatives chosen on the single lookahead token: EXCLAMATION
    alone; a token-set match for the printable range up to LEFT_BRACE; a
    second token-set match for tokens numbered >= 65 (tested via an offset
    64-bit mask); or the obs_qtext sub-rule for obsolete control chars.
    """
    localctx = sdpParser.QtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 166, self.RULE_qtext)
    self._la = 0 # Token type
    try:
        self.state = 1105
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.EXCLAMATION]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1101
            self.match(sdpParser.EXCLAMATION)
            pass
        elif token in [sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE]:
            # Alt 2: set-match; the bitmask re-checks the same set before consuming.
            self.enterOuterAlt(localctx, 2)
            self.state = 1102
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.GREATER_THAN) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z) | (1 << sdpParser.LEFT_BRACE))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
            # Alt 3: set-match for tokens >= 65, tested with a shifted mask.
            self.enterOuterAlt(localctx, 3)
            self.state = 1103
            _la = self._input.LA(1)
            if not(((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            # Alt 4: obsolete control characters go through the obs_qtext sub-rule.
            self.enterOuterAlt(localctx, 4)
            self.state = 1104
            self.obs_qtext()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class QcontentContext(ParserRuleContext):
    """Parse-tree node for the `qcontent` rule: qtext | quoted_pair."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Exactly one of these two children is present per instance.
    def qtext(self): return self.getTypedRuleContext(sdpParser.QtextContext, 0)
    def quoted_pair(self): return self.getTypedRuleContext(sdpParser.Quoted_pairContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_qcontent

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterQcontent", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitQcontent", None)
        if handler is not None:
            handler(self)
def qcontent(self):
    """Parse one `qcontent`: qtext | quoted_pair (ANTLR4-generated).

    The first alternative covers every qtext-start token (printables plus
    the obsolete control-character tokens); BACKSLASH alone selects the
    quoted_pair escape alternative.
    """
    localctx = sdpParser.QcontentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 168, self.RULE_qcontent)
    try:
        self.state = 1109
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.EXCLAMATION, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1107
            self.qtext()
            pass
        elif token in [sdpParser.BACKSLASH]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1108
            self.quoted_pair()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Quoted_stringContext(ParserRuleContext):
    """Parse-tree node for `quoted_string`:
    cfws? dquote (fws? qcontent)* fws? dquote cfws?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def dquote(self, i:int=None):
        # No index -> both dquote children; otherwise the i-th occurrence.
        return (self.getTypedRuleContexts(sdpParser.DquoteContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.DquoteContext, i))

    def cfws(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.CfwsContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.CfwsContext, i))

    def qcontent(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.QcontentContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.QcontentContext, i))

    def fws(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.FwsContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.FwsContext, i))

    def getRuleIndex(self):
        return sdpParser.RULE_quoted_string

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterQuoted_string", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitQuoted_string", None)
        if handler is not None:
            handler(self)
def quoted_string(self):
    """Parse one `quoted_string`:
    cfws? dquote (fws? qcontent)* fws? dquote cfws? (ANTLR4-generated).
    """
    localctx = sdpParser.Quoted_stringContext(self, self._ctx, self.state)
    self.enterRule(localctx, 170, self.RULE_quoted_string)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1112
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading cfws: taken only when lookahead is TAB/CR/SPACE/LEFT_PAREN.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1111
            self.cfws()

        self.state = 1114
        self.dquote()
        # (fws? qcontent)* loop, driven by adaptive prediction on decision 104.
        self.state = 1121
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,104,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1116
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional fws before each qcontent (TAB/CR/SPACE lookahead).
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
                    self.state = 1115
                    self.fws()

                self.state = 1118
                self.qcontent()
            self.state = 1123
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,104,self._ctx)

        # Optional fws before the closing quote.
        self.state = 1125
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
            self.state = 1124
            self.fws()

        self.state = 1127
        self.dquote()
        # Optional trailing cfws, resolved via ATN decision 106.
        self.state = 1129
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,106,self._ctx)
        if la_ == 1:
            self.state = 1128
            self.cfws()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WordContext(ParserRuleContext):
    """Parse-tree node for the `word` rule: atom | quoted_string."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Exactly one of these two children is present per instance.
    def atom(self): return self.getTypedRuleContext(sdpParser.AtomContext, 0)
    def quoted_string(self): return self.getTypedRuleContext(sdpParser.Quoted_stringContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_word

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterWord", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitWord", None)
        if handler is not None:
            handler(self)
def word(self):
    """Parse one `word`: atom | quoted_string (ANTLR4-generated).

    The two alternatives can both start with cfws, so the choice is made
    by adaptive prediction (ATN decision 107) rather than one-token lookahead.
    """
    localctx = sdpParser.WordContext(self, self._ctx, self.state)
    self.enterRule(localctx, 172, self.RULE_word)
    try:
        self.state = 1133
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,107,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1131
            self.atom()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1132
            self.quoted_string()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PhraseContext(ParserRuleContext):
    """Parse-tree node for the `phrase` rule: word+ | obs_phrase."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def word(self, i:int=None):
        # No index -> every word child; otherwise the i-th occurrence.
        return (self.getTypedRuleContexts(sdpParser.WordContext)
                if i is None
                else self.getTypedRuleContext(sdpParser.WordContext, i))

    def obs_phrase(self):
        # Present only when the obsolete-phrase alternative matched.
        return self.getTypedRuleContext(sdpParser.Obs_phraseContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_phrase

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterPhrase", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitPhrase", None)
        if handler is not None:
            handler(self)
def phrase(self):
    """Parse one `phrase`: word+ | obs_phrase (ANTLR4-generated).

    Outer alternative chosen by ATN decision 109; the word+ loop inside
    alternative 1 is continued/exited via decision 108.
    """
    localctx = sdpParser.PhraseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 174, self.RULE_phrase)
    try:
        self.state = 1141
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,109,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            # One-or-more word loop (_alt == 2 means "exit loop").
            self.state = 1136
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 1135
                    self.word()

                else:
                    raise NoViableAltException(self)
                self.state = 1138
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,108,self._ctx)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1140
            self.obs_phrase()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Quoted_pairContext(ParserRuleContext):
    """Parse-tree node for the `quoted_pair` rule: a BACKSLASH escape
    followed by a vchar or wsp, or the obsolete obs_qp form."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def BACKSLASH(self):
        # The escape introducer token.
        return self.getToken(sdpParser.BACKSLASH, 0)

    # At most one of the following children is present per instance.
    def vchar(self): return self.getTypedRuleContext(sdpParser.VcharContext, 0)
    def wsp(self): return self.getTypedRuleContext(sdpParser.WspContext, 0)
    def obs_qp(self): return self.getTypedRuleContext(sdpParser.Obs_qpContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_quoted_pair

    def enterRule(self, listener:ParseTreeListener):
        # Listener methods are optional; dispatch only when present.
        handler = getattr(listener, "enterQuoted_pair", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitQuoted_pair", None)
        if handler is not None:
            handler(self)
def quoted_pair(self):
    """Parse `quoted_pair` (rule 176): BACKSLASH (vchar | wsp), or `obs_qp`.

    ANTLR-generated: state numbers and decision index 111 are bound to the
    serialized ATN -- do not edit by hand.
    """
    localctx = sdpParser.Quoted_pairContext(self, self._ctx, self.state)
    self.enterRule(localctx, 176, self.RULE_quoted_pair)
    try:
        self.state = 1149
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,111,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1143
            self.match(sdpParser.BACKSLASH)
            self.state = 1146
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # After the backslash: any visible (printable) character token ...
            if token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.BACKSLASH, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
                self.state = 1144
                self.vchar()
                pass
            # ... or horizontal whitespace (tab / space).
            elif token in [sdpParser.TAB, sdpParser.SPACE]:
                self.state = 1145
                self.wsp()
                pass
            else:
                raise NoViableAltException(self)

            pass

        elif la_ == 2:
            # Obsolete quoted-pair form (cf. RFC 5322 obs-qp).
            self.enterOuterAlt(localctx, 2)
            self.state = 1148
            self.obs_qp()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FwsContext(ParserRuleContext):
    """Parse-tree node for the `fws` (folding white space) rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def crlf(self):
        return self.getTypedRuleContext(sdpParser.CrlfContext, 0)

    def wsp(self, i:int=None):
        # All WspContext children when i is None, otherwise the i-th one.
        return (self.getTypedRuleContexts(sdpParser.WspContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.WspContext, i))

    def obs_fws(self):
        return self.getTypedRuleContext(sdpParser.Obs_fwsContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_fws

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterFws", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitFws", lambda ctx: None)(self)
def fws(self):
    """Parse `fws` (rule 178): ([wsp* crlf]? wsp+) | obs_fws -- folding white space.

    ANTLR-generated: state numbers and decision indices (113, 114, 115) are
    bound to the serialized ATN -- do not edit by hand.
    """
    localctx = sdpParser.FwsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 178, self.RULE_fws)
    self._la = 0 # Token type
    try:
        self.state = 1166
        self._errHandler.sync(self)
        # Decision 115 selects between the plain and obsolete forms.
        la_ = self._interp.adaptivePredict(self._input,115,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1158
            self._errHandler.sync(self)
            # Decision 113: optional leading "wsp* crlf" before the mandatory wsp+.
            la_ = self._interp.adaptivePredict(self._input,113,self._ctx)
            if la_ == 1:
                self.state = 1154
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Zero or more wsp tokens before the line break.
                while _la==sdpParser.TAB or _la==sdpParser.SPACE:
                    self.state = 1151
                    self.wsp()
                    self.state = 1156
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 1157
                self.crlf()


            self.state = 1161
            self._errHandler.sync(self)
            _alt = 1
            # Positive closure: at least one wsp (decision 114 re-predicts).
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 1160
                    self.wsp()

                else:
                    raise NoViableAltException(self)
                self.state = 1163
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,114,self._ctx)

            pass

        elif la_ == 2:
            # Obsolete folding-white-space form (cf. RFC 5322 obs-FWS).
            self.enterOuterAlt(localctx, 2)
            self.state = 1165
            self.obs_fws()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CtextContext(ParserRuleContext):
    """Parse-tree node for the `ctext` rule (printable characters allowed
    inside a comment, excluding parentheses and backslash).

    ANTLR-generated boilerplate: one accessor per terminal alternative plus
    the `obs_ctext` sub-rule accessor. Do not edit by hand.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- terminal accessors: each returns the matched token, if present ---
    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)
    def QUOTE(self):
        return self.getToken(sdpParser.QUOTE, 0)
    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)
    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)
    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)
    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)
    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)
    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)
    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)
    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)
    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)
    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)
    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)
    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)
    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)
    def AT(self):
        return self.getToken(sdpParser.AT, 0)
    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self):
        return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self):
        return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self):
        return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self):
        return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self):
        return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self):
        return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self):
        return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self):
        return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self):
        return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self):
        return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self):
        return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self):
        return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self):
        return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self):
        return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self):
        return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self):
        return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self):
        return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self):
        return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self):
        return self.getToken(sdpParser.CAP_Z, 0)
    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)
    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)
    def A(self):
        return self.getToken(sdpParser.A, 0)
    def B(self):
        return self.getToken(sdpParser.B, 0)
    def C(self):
        return self.getToken(sdpParser.C, 0)
    def D(self):
        return self.getToken(sdpParser.D, 0)
    def E(self):
        return self.getToken(sdpParser.E, 0)
    def F(self):
        return self.getToken(sdpParser.F, 0)
    def G(self):
        return self.getToken(sdpParser.G, 0)
    def H(self):
        return self.getToken(sdpParser.H, 0)
    def I(self):
        return self.getToken(sdpParser.I, 0)
    def J(self):
        return self.getToken(sdpParser.J, 0)
    def K(self):
        return self.getToken(sdpParser.K, 0)
    def L(self):
        return self.getToken(sdpParser.L, 0)
    def M(self):
        return self.getToken(sdpParser.M, 0)
    def N(self):
        return self.getToken(sdpParser.N, 0)
    def O(self):
        return self.getToken(sdpParser.O, 0)
    def P(self):
        return self.getToken(sdpParser.P, 0)
    def Q(self):
        return self.getToken(sdpParser.Q, 0)
    def R(self):
        return self.getToken(sdpParser.R, 0)
    def S(self):
        return self.getToken(sdpParser.S, 0)
    def T(self):
        return self.getToken(sdpParser.T, 0)
    def U(self):
        return self.getToken(sdpParser.U, 0)
    def V(self):
        return self.getToken(sdpParser.V, 0)
    def W(self):
        return self.getToken(sdpParser.W, 0)
    def X(self):
        return self.getToken(sdpParser.X, 0)
    def Y(self):
        return self.getToken(sdpParser.Y, 0)
    def Z(self):
        return self.getToken(sdpParser.Z, 0)
    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)

    def obs_ctext(self):
        # Obsolete-ctext alternative (control characters), if matched.
        return self.getTypedRuleContext(sdpParser.Obs_ctextContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_ctext

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCtext" ):
            listener.enterCtext(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCtext" ):
            listener.exitCtext(self)
def ctext(self):
    """Parse `ctext` (rule 180): a single comment-text character.

    Three token-set alternatives cover the printable characters legal inside a
    comment; the fourth defers to `obs_ctext` for obsolete control characters.
    The `(1 << _la)` bitmask tests are ANTLR's compiled form of set membership
    over token type numbers -- do not edit by hand.
    """
    localctx = sdpParser.CtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 180, self.RULE_ctext)
    self._la = 0 # Token type
    try:
        self.state = 1172
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alt 1: '!' through '\'' (token types fit in the low 64-bit mask word).
        if token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1168
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 2: '*' through '['.
        elif token in [sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1169
            _la = self._input.LA(1)
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.GREATER_THAN) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z) | (1 << sdpParser.LEFT_BRACE))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 3: ']' through '~' (token types >= 65, so the mask is offset by 65).
        elif token in [sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1170
            _la = self._input.LA(1)
            if not(((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        # Alt 4: obsolete control characters handled by the obs_ctext sub-rule.
        elif token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1171
            self.obs_ctext()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CcontentContext(ParserRuleContext):
    """Parse-tree node for the `ccontent` rule (one unit of comment content)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ctext(self):
        return self.getTypedRuleContext(sdpParser.CtextContext, 0)

    def quoted_pair(self):
        return self.getTypedRuleContext(sdpParser.Quoted_pairContext, 0)

    def comment(self):
        # Nested comment (comments may recurse).
        return self.getTypedRuleContext(sdpParser.CommentContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ccontent

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterCcontent", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitCcontent", lambda ctx: None)(self)
def ccontent(self):
    """Parse `ccontent` (rule 182): ctext | quoted_pair | comment.

    Dispatches on one token of lookahead: printable/control text goes to
    `ctext`, a backslash starts a `quoted_pair`, and '(' opens a nested
    `comment`. ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.CcontentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 182, self.RULE_ccontent)
    try:
        self.state = 1177
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, 
        sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1174
            self.ctext()
            pass
        elif token in [sdpParser.BACKSLASH]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1175
            self.quoted_pair()
            pass
        elif token in [sdpParser.LEFT_PAREN]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1176
            self.comment()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CommentContext(ParserRuleContext):
    """Parse-tree node for the `comment` rule: '(' (fws? ccontent)* fws? ')'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)

    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)

    def ccontent(self, i:int=None):
        # All CcontentContext children when i is None, otherwise the i-th one.
        return (self.getTypedRuleContexts(sdpParser.CcontentContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.CcontentContext, i))

    def fws(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.FwsContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.FwsContext, i))

    def getRuleIndex(self):
        return sdpParser.RULE_comment

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterComment", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitComment", lambda ctx: None)(self)
def comment(self):
    """Parse `comment` (rule 184): '(' (fws? ccontent)* fws? ')'.

    ANTLR-generated: state numbers and decision index 119 are bound to the
    serialized ATN -- do not edit by hand.
    """
    localctx = sdpParser.CommentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 184, self.RULE_comment)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1179
        self.match(sdpParser.LEFT_PAREN)
        self.state = 1186
        self._errHandler.sync(self)
        # Closure over (fws? ccontent); decision 119 decides whether to continue.
        _alt = self._interp.adaptivePredict(self._input,119,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1181
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional leading folding white space (TAB / CR / SPACE).
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
                    self.state = 1180
                    self.fws()

                self.state = 1183
                self.ccontent()

            self.state = 1188
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,119,self._ctx)

        self.state = 1190
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing folding white space before the closing paren.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
            self.state = 1189
            self.fws()

        self.state = 1192
        self.match(sdpParser.RIGHT_PAREN)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CfwsContext(ParserRuleContext):
    """Parse-tree node for the `cfws` rule (comments and/or folding white space)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def comment(self, i:int=None):
        # All CommentContext children when i is None, otherwise the i-th one.
        return (self.getTypedRuleContexts(sdpParser.CommentContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.CommentContext, i))

    def fws(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.FwsContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.FwsContext, i))

    def getRuleIndex(self):
        return sdpParser.RULE_cfws

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterCfws", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitCfws", lambda ctx: None)(self)
def cfws(self):
    """Parse `cfws` (rule 186): (fws? comment)+ fws? | fws.

    NOTE: `la_` is reassigned by the inner optional-fws decision (123); the
    outer `elif la_ == 2` is still correct because Python only evaluates it
    when the outer `if` (decision 124) was false, i.e. before any reassignment.
    ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.CfwsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 186, self.RULE_cfws)
    self._la = 0 # Token type
    try:
        self.state = 1206
        self._errHandler.sync(self)
        # Decision 124 selects between the comment-bearing and pure-fws forms.
        la_ = self._interp.adaptivePredict(self._input,124,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1198
            self._errHandler.sync(self)
            _alt = 1
            # Positive closure: one or more (fws? comment) groups (decision 122).
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 1195
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    # Optional folding white space before the comment.
                    if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
                        self.state = 1194
                        self.fws()

                    self.state = 1197
                    self.comment()

                else:
                    raise NoViableAltException(self)
                self.state = 1200
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,122,self._ctx)

            self.state = 1203
            self._errHandler.sync(self)
            # Decision 123: optional trailing folding white space.
            la_ = self._interp.adaptivePredict(self._input,123,self._ctx)
            if la_ == 1:
                self.state = 1202
                self.fws()


            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1205
            self.fws()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_ctextContext(ParserRuleContext):
    """Parse-tree node for the `obs_ctext` rule (obsolete comment text)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def obs_no_ws_ctl(self):
        return self.getTypedRuleContext(sdpParser.Obs_no_ws_ctlContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_ctext

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterObs_ctext", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitObs_ctext", lambda ctx: None)(self)
def obs_ctext(self):
    """Parse `obs_ctext` (rule 188): delegates to `obs_no_ws_ctl`.

    ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.Obs_ctextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 188, self.RULE_obs_ctext)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1208
        self.obs_no_ws_ctl()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_qtextContext(ParserRuleContext):
    """Parse-tree node for the `obs_qtext` rule (obsolete quoted text)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def obs_no_ws_ctl(self):
        return self.getTypedRuleContext(sdpParser.Obs_no_ws_ctlContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_qtext

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterObs_qtext", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitObs_qtext", lambda ctx: None)(self)
def obs_qtext(self):
    """Parse `obs_qtext` (rule 190): delegates to `obs_no_ws_ctl`.

    ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.Obs_qtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 190, self.RULE_obs_qtext)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1210
        self.obs_no_ws_ctl()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_utextContext(ParserRuleContext):
    """Parse-tree node for the `obs_utext` rule (obsolete unstructured text)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def U_0000(self):
        # NUL terminal, if that alternative was matched.
        return self.getToken(sdpParser.U_0000, 0)

    def obs_no_ws_ctl(self):
        return self.getTypedRuleContext(sdpParser.Obs_no_ws_ctlContext, 0)

    def vchar(self):
        return self.getTypedRuleContext(sdpParser.VcharContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_utext

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterObs_utext", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitObs_utext", lambda ctx: None)(self)
def obs_utext(self):
    """Parse `obs_utext` (rule 192): U_0000 | obs_no_ws_ctl | vchar.

    Dispatches on one token of lookahead: NUL, obsolete control characters,
    or any visible character. ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.Obs_utextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 192, self.RULE_obs_utext)
    try:
        self.state = 1215
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.U_0000]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1212
            self.match(sdpParser.U_0000)
            pass
        elif token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1213
            self.obs_no_ws_ctl()
            pass
        elif token in [sdpParser.EXCLAMATION, sdpParser.QUOTE, sdpParser.HASH, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.SLASH, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.LESS_THAN, sdpParser.EQUALS, sdpParser.GREATER_THAN, sdpParser.QUESTION, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.LEFT_BRACE, sdpParser.BACKSLASH, sdpParser.RIGHT_BRACE, sdpParser.CARAT, sdpParser.UNDERSCORE, sdpParser.ACCENT, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.LEFT_CURLY_BRACE, sdpParser.PIPE, sdpParser.RIGHT_CURLY_BRACE, sdpParser.TILDE]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1214
            self.vchar()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_qpContext(ParserRuleContext):
    """Parse-tree node for the `obs_qp` rule (obsolete quoted-pair)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def BACKSLASH(self):
        # The backslash terminal that introduces the obsolete quoted pair.
        return self.getToken(sdpParser.BACKSLASH, 0)

    def U_0000(self):
        return self.getToken(sdpParser.U_0000, 0)

    def obs_no_ws_ctl(self):
        return self.getTypedRuleContext(sdpParser.Obs_no_ws_ctlContext, 0)

    def lf(self):
        return self.getTypedRuleContext(sdpParser.LfContext, 0)

    def cr(self):
        return self.getTypedRuleContext(sdpParser.CrContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_qp

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterObs_qp", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitObs_qp", lambda ctx: None)(self)
def obs_qp(self):
    """Parse `obs_qp` (rule 194): BACKSLASH (U_0000 | obs_no_ws_ctl | lf | cr).

    The escaped character after the backslash is chosen on one token of
    lookahead. ANTLR-generated -- do not edit by hand.
    """
    localctx = sdpParser.Obs_qpContext(self, self._ctx, self.state)
    self.enterRule(localctx, 194, self.RULE_obs_qp)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1217
        self.match(sdpParser.BACKSLASH)
        self.state = 1222
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.U_0000]:
            self.state = 1218
            self.match(sdpParser.U_0000)
            pass
        elif token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.state = 1219
            self.obs_no_ws_ctl()
            pass
        elif token in [sdpParser.LF]:
            self.state = 1220
            self.lf()
            pass
        elif token in [sdpParser.CR]:
            self.state = 1221
            self.cr()
            pass
        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_phraseContext(ParserRuleContext):
    """Parse-tree node for the `obs_phrase` rule: word (word | '.' | cfws)*."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def word(self, i:int=None):
        # All WordContext children when i is None, otherwise the i-th one.
        return (self.getTypedRuleContexts(sdpParser.WordContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.WordContext, i))

    def PERIOD(self, i:int=None):
        # All PERIOD tokens when i is None, otherwise the i-th one.
        return (self.getTokens(sdpParser.PERIOD)
                if i is None else
                self.getToken(sdpParser.PERIOD, i))

    def cfws(self, i:int=None):
        return (self.getTypedRuleContexts(sdpParser.CfwsContext)
                if i is None else
                self.getTypedRuleContext(sdpParser.CfwsContext, i))

    def getRuleIndex(self):
        return sdpParser.RULE_obs_phrase

    def enterRule(self, listener:ParseTreeListener):
        # Invoke the listener hook only when the listener defines it.
        getattr(listener, "enterObs_phrase", lambda ctx: None)(self)

    def exitRule(self, listener:ParseTreeListener):
        getattr(listener, "exitObs_phrase", lambda ctx: None)(self)
def obs_phrase(self):
    """Parse rule ``obs_phrase``: word (word | PERIOD | cfws)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-phrase shape.
    """
    localctx = sdpParser.Obs_phraseContext(self, self._ctx, self.state)
    self.enterRule(localctx, 196, self.RULE_obs_phrase)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1224
        self.word()
        self.state = 1230
        self._errHandler.sync(self)
        # Adaptive prediction (decision 128) decides whether to keep looping;
        # _alt == 1 continues, 2 / INVALID_ALT_NUMBER stops.
        _alt = self._interp.adaptivePredict(self._input,128,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1228
                self._errHandler.sync(self)
                # Decision 127 picks which of the three tail alternatives to take.
                la_ = self._interp.adaptivePredict(self._input,127,self._ctx)
                if la_ == 1:
                    self.state = 1225
                    self.word()
                    pass
                elif la_ == 2:
                    self.state = 1226
                    self.match(sdpParser.PERIOD)
                    pass
                elif la_ == 3:
                    self.state = 1227
                    self.cfws()
                    pass
            self.state = 1232
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,128,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_phrase_listContext(ParserRuleContext):
    """Parse-tree node for the ``obs_phrase_list`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def phrase(self, i:int=None):
        # i is None -> list of all phrase children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.PhraseContext)
        else:
            return self.getTypedRuleContext(sdpParser.PhraseContext,i)

    def cfws(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def COMMA(self, i:int=None):
        # i is None -> list of all COMMA tokens; otherwise the i-th one.
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        else:
            return self.getToken(sdpParser.COMMA, i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_phrase_list

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_phrase_list" ):
            listener.enterObs_phrase_list(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_phrase_list" ):
            listener.exitObs_phrase_list(self)
def obs_phrase_list(self):
    """Parse rule ``obs_phrase_list``: (phrase | cfws)? (COMMA (phrase | cfws)?)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-phrase-list shape.
    """
    localctx = sdpParser.Obs_phrase_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 198, self.RULE_obs_phrase_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1235
        self._errHandler.sync(self)
        # Decision 129: optional leading phrase or cfws.
        la_ = self._interp.adaptivePredict(self._input,129,self._ctx)
        if la_ == 1:
            self.state = 1233
            self.phrase()
        elif la_ == 2:
            self.state = 1234
            self.cfws()
        self.state = 1244
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Each COMMA may be followed by an optional phrase or cfws.
        while _la==sdpParser.COMMA:
            self.state = 1237
            self.match(sdpParser.COMMA)
            self.state = 1240
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,130,self._ctx)
            if la_ == 1:
                self.state = 1238
                self.phrase()
            elif la_ == 2:
                self.state = 1239
                self.cfws()
            self.state = 1246
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_angle_addrContext(ParserRuleContext):
    """Parse-tree node for the ``obs_angle_addr`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)

    def obs_route(self):
        return self.getTypedRuleContext(sdpParser.Obs_routeContext,0)

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext,0)

    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)

    def cfws(self, i:int=None):
        # i is None -> list of all cfws children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_angle_addr

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_angle_addr" ):
            listener.enterObs_angle_addr(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_angle_addr" ):
            listener.exitObs_angle_addr(self)
def obs_angle_addr(self):
    """Parse rule ``obs_angle_addr``:
    cfws? LESS_THAN obs_route addr_spec GREATER_THAN cfws?.

    Auto-generated by ANTLR; matches RFC 5322's obs-angle-addr shape.
    """
    localctx = sdpParser.Obs_angle_addrContext(self, self._ctx, self.state)
    self.enterRule(localctx, 200, self.RULE_obs_angle_addr)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1248
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Bit-set membership test: TAB, CR, SPACE or LEFT_PAREN can start cfws.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1247
            self.cfws()
        self.state = 1250
        self.match(sdpParser.LESS_THAN)
        self.state = 1251
        self.obs_route()
        self.state = 1252
        self.addr_spec()
        self.state = 1253
        self.match(sdpParser.GREATER_THAN)
        self.state = 1255
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing cfws after the closing angle bracket.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1254
            self.cfws()
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_routeContext(ParserRuleContext):
    """Parse-tree node for the ``obs_route`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def obs_domain_list(self):
        return self.getTypedRuleContext(sdpParser.Obs_domain_listContext,0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_route

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_route" ):
            listener.enterObs_route(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_route" ):
            listener.exitObs_route(self)
def obs_route(self):
    """Parse rule ``obs_route``: obs_domain_list COLON.

    Auto-generated by ANTLR; matches RFC 5322's obs-route shape.
    """
    localctx = sdpParser.Obs_routeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 202, self.RULE_obs_route)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1257
        self.obs_domain_list()
        self.state = 1258
        self.match(sdpParser.COLON)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_domain_listContext(ParserRuleContext):
    """Parse-tree node for the ``obs_domain_list`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def AT(self, i:int=None):
        # i is None -> list of all AT tokens; otherwise the i-th one.
        if i is None:
            return self.getTokens(sdpParser.AT)
        else:
            return self.getToken(sdpParser.AT, i)

    def domain(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DomainContext)
        else:
            return self.getTypedRuleContext(sdpParser.DomainContext,i)

    def cfws(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def COMMA(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        else:
            return self.getToken(sdpParser.COMMA, i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_domain_list

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_domain_list" ):
            listener.enterObs_domain_list(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_domain_list" ):
            listener.exitObs_domain_list(self)
def obs_domain_list(self):
    """Parse rule ``obs_domain_list``:
    (cfws | COMMA)* AT domain (COMMA cfws? (AT domain)?)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-domain-list shape.
    """
    localctx = sdpParser.Obs_domain_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 204, self.RULE_obs_domain_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1264
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Leading filler: any mix of cfws (TAB/CR/SPACE/LEFT_PAREN start set) and commas.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.COMMA))) != 0):
            self.state = 1262
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.TAB, sdpParser.CR, sdpParser.SPACE, sdpParser.LEFT_PAREN]:
                self.state = 1260
                self.cfws()
                pass
            elif token in [sdpParser.COMMA]:
                self.state = 1261
                self.match(sdpParser.COMMA)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 1266
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 1267
        self.match(sdpParser.AT)
        self.state = 1268
        self.domain()
        self.state = 1279
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Tail: COMMA, then optional cfws, then optional "AT domain".
        while _la==sdpParser.COMMA:
            self.state = 1269
            self.match(sdpParser.COMMA)
            self.state = 1271
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                self.state = 1270
                self.cfws()
            self.state = 1275
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==sdpParser.AT:
                self.state = 1273
                self.match(sdpParser.AT)
                self.state = 1274
                self.domain()
            self.state = 1281
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_mbox_listContext(ParserRuleContext):
    """Parse-tree node for the ``obs_mbox_list`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def mailbox(self, i:int=None):
        # i is None -> list of all mailbox children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.MailboxContext)
        else:
            return self.getTypedRuleContext(sdpParser.MailboxContext,i)

    def COMMA(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        else:
            return self.getToken(sdpParser.COMMA, i)

    def cfws(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_mbox_list

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_mbox_list" ):
            listener.enterObs_mbox_list(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_mbox_list" ):
            listener.exitObs_mbox_list(self)
def obs_mbox_list(self):
    """Parse rule ``obs_mbox_list``:
    (cfws? COMMA)* mailbox (COMMA (mailbox | cfws)?)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-mbox-list shape.
    """
    localctx = sdpParser.Obs_mbox_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 206, self.RULE_obs_mbox_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1288
        self._errHandler.sync(self)
        # Decision 140 governs the leading "(cfws? COMMA)*" loop.
        _alt = self._interp.adaptivePredict(self._input,140,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1283
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Bit-set test: TAB, CR, SPACE or LEFT_PAREN can start cfws.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                    self.state = 1282
                    self.cfws()
                self.state = 1285
                self.match(sdpParser.COMMA)
            self.state = 1290
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,140,self._ctx)
        self.state = 1291
        self.mailbox()
        self.state = 1299
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Tail: each COMMA may be followed by another mailbox or bare cfws.
        while _la==sdpParser.COMMA:
            self.state = 1292
            self.match(sdpParser.COMMA)
            self.state = 1295
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,141,self._ctx)
            if la_ == 1:
                self.state = 1293
                self.mailbox()
            elif la_ == 2:
                self.state = 1294
                self.cfws()
            self.state = 1301
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_addr_listContext(ParserRuleContext):
    """Parse-tree node for the ``obs_addr_list`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def address(self, i:int=None):
        # i is None -> list of all address children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AddressContext)
        else:
            return self.getTypedRuleContext(sdpParser.AddressContext,i)

    def COMMA(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        else:
            return self.getToken(sdpParser.COMMA, i)

    def cfws(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_addr_list

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_addr_list" ):
            listener.enterObs_addr_list(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_addr_list" ):
            listener.exitObs_addr_list(self)
def obs_addr_list(self):
    """Parse rule ``obs_addr_list``:
    (cfws? COMMA)* address (COMMA (address | cfws)?)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-addr-list shape
    (same structure as obs_mbox_list but over addresses).
    """
    localctx = sdpParser.Obs_addr_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 208, self.RULE_obs_addr_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1308
        self._errHandler.sync(self)
        # Decision 144 governs the leading "(cfws? COMMA)*" loop.
        _alt = self._interp.adaptivePredict(self._input,144,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1303
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Bit-set test: TAB, CR, SPACE or LEFT_PAREN can start cfws.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                    self.state = 1302
                    self.cfws()
                self.state = 1305
                self.match(sdpParser.COMMA)
            self.state = 1310
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,144,self._ctx)
        self.state = 1311
        self.address()
        self.state = 1319
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Tail: each COMMA may be followed by another address or bare cfws.
        while _la==sdpParser.COMMA:
            self.state = 1312
            self.match(sdpParser.COMMA)
            self.state = 1315
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,145,self._ctx)
            if la_ == 1:
                self.state = 1313
                self.address()
            elif la_ == 2:
                self.state = 1314
                self.cfws()
            self.state = 1321
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_group_listContext(ParserRuleContext):
    """Parse-tree node for the ``obs_group_list`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def COMMA(self, i:int=None):
        # i is None -> list of all COMMA tokens; otherwise the i-th one.
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        else:
            return self.getToken(sdpParser.COMMA, i)

    def cfws(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_group_list

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_group_list" ):
            listener.enterObs_group_list(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_group_list" ):
            listener.exitObs_group_list(self)
def obs_group_list(self):
    """Parse rule ``obs_group_list``: (cfws? COMMA)+ cfws?.

    Auto-generated by ANTLR; matches RFC 5322's obs-group-list shape.
    """
    localctx = sdpParser.Obs_group_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 210, self.RULE_obs_group_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1326
        self._errHandler.sync(self)
        # One-or-more loop: _alt starts at 1 so the body runs at least once;
        # decision 148 decides whether to iterate again.
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 1323
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Bit-set test: TAB, CR, SPACE or LEFT_PAREN can start cfws.
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                    self.state = 1322
                    self.cfws()
                self.state = 1325
                self.match(sdpParser.COMMA)
            else:
                raise NoViableAltException(self)
            self.state = 1328
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,148,self._ctx)
        self.state = 1331
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing cfws.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1330
            self.cfws()
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_local_partContext(ParserRuleContext):
    """Parse-tree node for the ``obs_local_part`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def word(self, i:int=None):
        # i is None -> list of all word children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.WordContext)
        else:
            return self.getTypedRuleContext(sdpParser.WordContext,i)

    def PERIOD(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.PERIOD)
        else:
            return self.getToken(sdpParser.PERIOD, i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_local_part

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_local_part" ):
            listener.enterObs_local_part(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_local_part" ):
            listener.exitObs_local_part(self)
def obs_local_part(self):
    """Parse rule ``obs_local_part``: word (PERIOD word)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-local-part shape.
    """
    localctx = sdpParser.Obs_local_partContext(self, self._ctx, self.state)
    self.enterRule(localctx, 212, self.RULE_obs_local_part)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1333
        self.word()
        self.state = 1338
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Dot-separated continuation: each PERIOD must be followed by a word.
        while _la==sdpParser.PERIOD:
            self.state = 1334
            self.match(sdpParser.PERIOD)
            self.state = 1335
            self.word()
            self.state = 1340
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_domainContext(ParserRuleContext):
    """Parse-tree node for the ``obs_domain`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def atom(self, i:int=None):
        # i is None -> list of all atom children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AtomContext)
        else:
            return self.getTypedRuleContext(sdpParser.AtomContext,i)

    def PERIOD(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.PERIOD)
        else:
            return self.getToken(sdpParser.PERIOD, i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_domain

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_domain" ):
            listener.enterObs_domain(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_domain" ):
            listener.exitObs_domain(self)
def obs_domain(self):
    """Parse rule ``obs_domain``: atom (PERIOD atom)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-domain shape.
    """
    localctx = sdpParser.Obs_domainContext(self, self._ctx, self.state)
    self.enterRule(localctx, 214, self.RULE_obs_domain)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1341
        self.atom()
        self.state = 1346
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Dot-separated continuation: each PERIOD must be followed by an atom.
        while _la==sdpParser.PERIOD:
            self.state = 1342
            self.match(sdpParser.PERIOD)
            self.state = 1343
            self.atom()
            self.state = 1348
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_dtextContext(ParserRuleContext):
    """Parse-tree node for the ``obs_dtext`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def obs_no_ws_ctl(self):
        return self.getTypedRuleContext(sdpParser.Obs_no_ws_ctlContext,0)

    def quoted_pair(self):
        return self.getTypedRuleContext(sdpParser.Quoted_pairContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_dtext

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_dtext" ):
            listener.enterObs_dtext(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_dtext" ):
            listener.exitObs_dtext(self)
def obs_dtext(self):
    """Parse rule ``obs_dtext``: obs_no_ws_ctl | quoted_pair.

    Auto-generated by ANTLR; matches RFC 5322's obs-dtext shape.
    """
    localctx = sdpParser.Obs_dtextContext(self, self._ctx, self.state)
    self.enterRule(localctx, 216, self.RULE_obs_dtext)
    try:
        self.state = 1351
        self._errHandler.sync(self)
        # One token of lookahead: control character vs. BACKSLASH-escaped pair.
        token = self._input.LA(1)
        if token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F, sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1349
            self.obs_no_ws_ctl()
            pass
        elif token in [sdpParser.BACKSLASH]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1350
            self.quoted_pair()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_fwsContext(ParserRuleContext):
    """Parse-tree node for the ``obs_fws`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def wsp(self, i:int=None):
        # i is None -> list of all wsp children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.WspContext)
        else:
            return self.getTypedRuleContext(sdpParser.WspContext,i)

    def crlf(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CrlfContext)
        else:
            return self.getTypedRuleContext(sdpParser.CrlfContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_fws

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_fws" ):
            listener.enterObs_fws(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_fws" ):
            listener.exitObs_fws(self)
def obs_fws(self):
    """Parse rule ``obs_fws``: wsp+ (crlf wsp+)*.

    Auto-generated by ANTLR; matches RFC 5322's obs-FWS (obsolete folding
    whitespace) shape: at least one whitespace char, then any number of
    folds (a line break followed by more whitespace).
    """
    localctx = sdpParser.Obs_fwsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 218, self.RULE_obs_fws)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1354
        self._errHandler.sync(self)
        # Leading wsp+ : one-or-more loop, continuation decided by decision 153.
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 1353
                self.wsp()
            else:
                raise NoViableAltException(self)
            self.state = 1356
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,153,self._ctx)
        self.state = 1366
        self._errHandler.sync(self)
        # (crlf wsp+)* : outer loop governed by decision 155.
        _alt = self._interp.adaptivePredict(self._input,155,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 1358
                self.crlf()
                self.state = 1360
                self._errHandler.sync(self)
                # Inner wsp+ after each fold, governed by decision 154.
                _alt = 1
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt == 1:
                        self.state = 1359
                        self.wsp()
                    else:
                        raise NoViableAltException(self)
                    self.state = 1362
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,154,self._ctx)
            self.state = 1368
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,155,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Obs_no_ws_ctlContext(ParserRuleContext):
    """Parse-tree node for the ``obs_no_ws_ctl`` rule (ANTLR-generated context).

    One token accessor per non-whitespace control character the rule can
    match: U+0001..U+0008, U+000B, U+000C, U+000E..U+001F, and U+007F.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def U_0001(self):
        return self.getToken(sdpParser.U_0001, 0)
    def U_0002(self):
        return self.getToken(sdpParser.U_0002, 0)
    def U_0003(self):
        return self.getToken(sdpParser.U_0003, 0)
    def U_0004(self):
        return self.getToken(sdpParser.U_0004, 0)
    def U_0005(self):
        return self.getToken(sdpParser.U_0005, 0)
    def U_0006(self):
        return self.getToken(sdpParser.U_0006, 0)
    def U_0007(self):
        return self.getToken(sdpParser.U_0007, 0)
    def U_0008(self):
        return self.getToken(sdpParser.U_0008, 0)
    def U_000B(self):
        return self.getToken(sdpParser.U_000B, 0)
    def U_000C(self):
        return self.getToken(sdpParser.U_000C, 0)
    def U_000E(self):
        return self.getToken(sdpParser.U_000E, 0)
    def U_000F(self):
        return self.getToken(sdpParser.U_000F, 0)
    def U_0010(self):
        return self.getToken(sdpParser.U_0010, 0)
    def U_0011(self):
        return self.getToken(sdpParser.U_0011, 0)
    def U_0012(self):
        return self.getToken(sdpParser.U_0012, 0)
    def U_0013(self):
        return self.getToken(sdpParser.U_0013, 0)
    def U_0014(self):
        return self.getToken(sdpParser.U_0014, 0)
    def U_0015(self):
        return self.getToken(sdpParser.U_0015, 0)
    def U_0016(self):
        return self.getToken(sdpParser.U_0016, 0)
    def U_0017(self):
        return self.getToken(sdpParser.U_0017, 0)
    def U_0018(self):
        return self.getToken(sdpParser.U_0018, 0)
    def U_0019(self):
        return self.getToken(sdpParser.U_0019, 0)
    def U_001A(self):
        return self.getToken(sdpParser.U_001A, 0)
    def U_001B(self):
        return self.getToken(sdpParser.U_001B, 0)
    def U_001C(self):
        return self.getToken(sdpParser.U_001C, 0)
    def U_001D(self):
        return self.getToken(sdpParser.U_001D, 0)
    def U_001E(self):
        return self.getToken(sdpParser.U_001E, 0)
    def U_001F(self):
        return self.getToken(sdpParser.U_001F, 0)
    def U_007F(self):
        return self.getToken(sdpParser.U_007F, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_obs_no_ws_ctl

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterObs_no_ws_ctl" ):
            listener.enterObs_no_ws_ctl(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitObs_no_ws_ctl" ):
            listener.exitObs_no_ws_ctl(self)
def obs_no_ws_ctl(self):
    """Parse rule ``obs_no_ws_ctl``: any non-whitespace control character.

    Auto-generated by ANTLR; matches RFC 5322's obs-NO-WS-CTL set:
    U+0001..U+0008 | U+000B | U+000C | U+000E..U+001F | U+007F.
    """
    localctx = sdpParser.Obs_no_ws_ctlContext(self, self._ctx, self.state)
    self.enterRule(localctx, 220, self.RULE_obs_no_ws_ctl)
    self._la = 0 # Token type
    try:
        self.state = 1374
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Alt 1: U+0001..U+0008, matched as a token set.
        if token in [sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1369
            _la = self._input.LA(1)
            # Bit-set re-check (offset by 100, presumably the token-type base
            # of U_0001 in this grammar -- confirm against the generated
            # serializedATN); mismatch triggers inline recovery.
            if not(((((_la - 100)) & ~0x3f) == 0 and ((1 << (_la - 100)) & ((1 << (sdpParser.U_0001 - 100)) | (1 << (sdpParser.U_0002 - 100)) | (1 << (sdpParser.U_0003 - 100)) | (1 << (sdpParser.U_0004 - 100)) | (1 << (sdpParser.U_0005 - 100)) | (1 << (sdpParser.U_0006 - 100)) | (1 << (sdpParser.U_0007 - 100)) | (1 << (sdpParser.U_0008 - 100)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.U_000B]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1370
            self.match(sdpParser.U_000B)
            pass
        elif token in [sdpParser.U_000C]:
            self.enterOuterAlt(localctx, 3)
            self.state = 1371
            self.match(sdpParser.U_000C)
            pass
        # Alt 4: U+000E..U+001F, matched as a token set.
        elif token in [sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F]:
            self.enterOuterAlt(localctx, 4)
            self.state = 1372
            _la = self._input.LA(1)
            # Same bit-set pattern, offset by 110 for this token range.
            if not(((((_la - 110)) & ~0x3f) == 0 and ((1 << (_la - 110)) & ((1 << (sdpParser.U_000E - 110)) | (1 << (sdpParser.U_000F - 110)) | (1 << (sdpParser.U_0010 - 110)) | (1 << (sdpParser.U_0011 - 110)) | (1 << (sdpParser.U_0012 - 110)) | (1 << (sdpParser.U_0013 - 110)) | (1 << (sdpParser.U_0014 - 110)) | (1 << (sdpParser.U_0015 - 110)) | (1 << (sdpParser.U_0016 - 110)) | (1 << (sdpParser.U_0017 - 110)) | (1 << (sdpParser.U_0018 - 110)) | (1 << (sdpParser.U_0019 - 110)) | (1 << (sdpParser.U_001A - 110)) | (1 << (sdpParser.U_001B - 110)) | (1 << (sdpParser.U_001C - 110)) | (1 << (sdpParser.U_001D - 110)) | (1 << (sdpParser.U_001E - 110)) | (1 << (sdpParser.U_001F - 110)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 5)
            self.state = 1373
            self.match(sdpParser.U_007F)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AddressContext(ParserRuleContext):
    """Parse-tree node for the ``address`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def mailbox(self):
        return self.getTypedRuleContext(sdpParser.MailboxContext,0)

    def group(self):
        return self.getTypedRuleContext(sdpParser.GroupContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_address

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterAddress" ):
            listener.enterAddress(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAddress" ):
            listener.exitAddress(self)
def address(self):
    """Parse rule ``address``: mailbox | group.

    Auto-generated by ANTLR; matches RFC 5322's address shape.
    """
    localctx = sdpParser.AddressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 222, self.RULE_address)
    try:
        self.state = 1378
        self._errHandler.sync(self)
        # Decision 157 disambiguates mailbox vs. group via adaptive prediction.
        la_ = self._interp.adaptivePredict(self._input,157,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1376
            self.mailbox()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1377
            self.group()
            pass
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MailboxContext(ParserRuleContext):
    """Parse-tree node for the ``mailbox`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def name_addr(self):
        return self.getTypedRuleContext(sdpParser.Name_addrContext,0)

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_mailbox

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterMailbox" ):
            listener.enterMailbox(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMailbox" ):
            listener.exitMailbox(self)
def mailbox(self):
    """Parse rule ``mailbox``: name_addr | addr_spec.

    Auto-generated by ANTLR; matches RFC 5322's mailbox shape.
    """
    localctx = sdpParser.MailboxContext(self, self._ctx, self.state)
    self.enterRule(localctx, 224, self.RULE_mailbox)
    try:
        self.state = 1382
        self._errHandler.sync(self)
        # Decision 158 disambiguates name_addr vs. bare addr_spec.
        la_ = self._interp.adaptivePredict(self._input,158,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1380
            self.name_addr()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1381
            self.addr_spec()
            pass
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Name_addrContext(ParserRuleContext):
    """Parse-tree node for the ``name_addr`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def angle_addr(self):
        return self.getTypedRuleContext(sdpParser.Angle_addrContext,0)

    def display_name(self):
        return self.getTypedRuleContext(sdpParser.Display_nameContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_name_addr

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterName_addr" ):
            listener.enterName_addr(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitName_addr" ):
            listener.exitName_addr(self)
def name_addr(self):
    """Parse rule ``name_addr``: display_name? angle_addr.

    Auto-generated by ANTLR; matches RFC 5322's name-addr shape.
    """
    localctx = sdpParser.Name_addrContext(self, self._ctx, self.state)
    self.enterRule(localctx, 226, self.RULE_name_addr)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1385
        self._errHandler.sync(self)
        # Decision 159: optional display_name before the angle-addr.
        la_ = self._interp.adaptivePredict(self._input,159,self._ctx)
        if la_ == 1:
            self.state = 1384
            self.display_name()
        self.state = 1387
        self.angle_addr()
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Angle_addrContext(ParserRuleContext):
    """Parse-tree node for the ``angle_addr`` rule (ANTLR-generated context)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)

    def addr_spec(self):
        return self.getTypedRuleContext(sdpParser.Addr_specContext,0)

    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)

    def cfws(self, i:int=None):
        # i is None -> list of all cfws children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CfwsContext)
        else:
            return self.getTypedRuleContext(sdpParser.CfwsContext,i)

    def obs_angle_addr(self):
        return self.getTypedRuleContext(sdpParser.Obs_angle_addrContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_angle_addr

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the hook.
        if hasattr( listener, "enterAngle_addr" ):
            listener.enterAngle_addr(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAngle_addr" ):
            listener.exitAngle_addr(self)
def angle_addr(self):
    """Parse rule ``angle_addr``.

    Alternative 1: optional cfws, '<', addr_spec, '>', optional cfws.
    Alternative 2: the obsolete obs_angle_addr form.
    """
    localctx = sdpParser.Angle_addrContext(self, self._ctx, self.state)
    self.enterRule(localctx, 228, self.RULE_angle_addr)
    self._la = 0 # Token type
    try:
        self.state = 1399
        self._errHandler.sync(self)
        # Decision 162 selects between the modern and obsolete forms.
        la_ = self._interp.adaptivePredict(self._input,162,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1390
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional leading cfws (TAB / CR / SPACE / '(' start a cfws run).
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                self.state = 1389
                self.cfws()

            self.state = 1392
            self.match(sdpParser.LESS_THAN)
            self.state = 1393
            self.addr_spec()
            self.state = 1394
            self.match(sdpParser.GREATER_THAN)
            self.state = 1396
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional trailing cfws, same predicate as above.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
                self.state = 1395
                self.cfws()

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1398
            self.obs_angle_addr()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class GroupContext(ParserRuleContext):
    """Parse-tree context for ``group``: display_name ':' group_list? ';' cfws?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Token and sub-rule accessors.
    def display_name(self): return self.getTypedRuleContext(sdpParser.Display_nameContext, 0)
    def COLON(self): return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self): return self.getToken(sdpParser.SEMICOLON, 0)
    def group_list(self): return self.getTypedRuleContext(sdpParser.Group_listContext, 0)
    def cfws(self): return self.getTypedRuleContext(sdpParser.CfwsContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_group

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterGroup", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitGroup", None)
        if handler is not None:
            handler(self)
def group(self):
    """Parse rule ``group``: display_name ':' optional group_list ';' optional cfws."""

    localctx = sdpParser.GroupContext(self, self._ctx, self.state)
    self.enterRule(localctx, 230, self.RULE_group)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1401
        self.display_name()
        self.state = 1402
        self.match(sdpParser.COLON)
        self.state = 1404
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional group_list: the bitset below is the FIRST set of group_list,
        # split into two 64-bit words (tokens 0-63, then tokens 64+ offset by 66).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.QUESTION) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & ((1 << (sdpParser.CARAT - 66)) | (1 << (sdpParser.UNDERSCORE - 66)) | (1 << (sdpParser.ACCENT - 66)) | (1 << (sdpParser.A - 66)) | (1 << (sdpParser.B - 66)) | (1 << (sdpParser.C - 66)) | (1 << (sdpParser.D - 66)) | (1 << (sdpParser.E - 66)) | (1 << (sdpParser.F - 66)) | (1 << (sdpParser.G - 66)) | (1 << (sdpParser.H - 66)) | (1 << (sdpParser.I - 66)) | (1 << (sdpParser.J - 66)) | (1 << (sdpParser.K - 66)) | (1 << (sdpParser.L - 66)) | (1 << (sdpParser.M - 66)) | (1 << (sdpParser.N - 66)) | (1 << (sdpParser.O - 66)) | (1 << (sdpParser.P - 66)) | (1 << (sdpParser.Q - 66)) | (1 << (sdpParser.R - 66)) | (1 << (sdpParser.S - 66)) | (1 << (sdpParser.T - 66)) | (1 << (sdpParser.U - 66)) | (1 << (sdpParser.V - 66)) | (1 << (sdpParser.W - 66)) | (1 << (sdpParser.X - 66)) | (1 << (sdpParser.Y - 66)) | (1 << (sdpParser.Z - 66)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 66)) | (1 << (sdpParser.PIPE - 66)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 66)) | (1 << (sdpParser.TILDE - 66)))) != 0):
            self.state = 1403
            self.group_list()

        self.state = 1406
        self.match(sdpParser.SEMICOLON)
        self.state = 1408
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing cfws (TAB / CR / SPACE / '(').
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE) | (1 << sdpParser.LEFT_PAREN))) != 0):
            self.state = 1407
            self.cfws()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Display_nameContext(ParserRuleContext):
    """Parse-tree context for ``display_name``, which is a single phrase."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def phrase(self): return self.getTypedRuleContext(sdpParser.PhraseContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_display_name

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterDisplay_name", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitDisplay_name", None)
        if handler is not None:
            handler(self)
def display_name(self):
    """Parse rule ``display_name``: delegates entirely to the ``phrase`` rule."""

    localctx = sdpParser.Display_nameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 232, self.RULE_display_name)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1410
        self.phrase()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Mailbox_listContext(ParserRuleContext):
    """Parse-tree context for ``mailbox_list``: mailbox (',' mailbox)*, or obs_mbox_list."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def mailbox(self, i:int=None):
        # No index: all mailbox children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.MailboxContext)
        return self.getTypedRuleContext(sdpParser.MailboxContext, i)

    def COMMA(self, i:int=None):
        # No index: all COMMA tokens; with an index: the i-th one.
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        return self.getToken(sdpParser.COMMA, i)

    def obs_mbox_list(self): return self.getTypedRuleContext(sdpParser.Obs_mbox_listContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_mailbox_list

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterMailbox_list", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitMailbox_list", None)
        if handler is not None:
            handler(self)
def mailbox_list(self):
    """Parse rule ``mailbox_list``.

    Alternative 1: a mailbox followed by zero or more ',' mailbox pairs.
    Alternative 2: the obsolete obs_mbox_list form.
    """
    localctx = sdpParser.Mailbox_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 234, self.RULE_mailbox_list)
    self._la = 0 # Token type
    try:
        self.state = 1421
        self._errHandler.sync(self)
        # Decision 166 selects between the modern list and the obsolete form.
        la_ = self._interp.adaptivePredict(self._input,166,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1412
            self.mailbox()
            self.state = 1417
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Greedily consume ',' mailbox repetitions.
            while _la==sdpParser.COMMA:
                self.state = 1413
                self.match(sdpParser.COMMA)
                self.state = 1414
                self.mailbox()
                self.state = 1419
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1420
            self.obs_mbox_list()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Address_listContext(ParserRuleContext):
    """Parse-tree context for ``address_list``: address (',' address)*, or obs_addr_list."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def address(self, i:int=None):
        # No index: all address children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AddressContext)
        return self.getTypedRuleContext(sdpParser.AddressContext, i)

    def COMMA(self, i:int=None):
        # No index: all COMMA tokens; with an index: the i-th one.
        if i is None:
            return self.getTokens(sdpParser.COMMA)
        return self.getToken(sdpParser.COMMA, i)

    def obs_addr_list(self): return self.getTypedRuleContext(sdpParser.Obs_addr_listContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_address_list

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterAddress_list", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitAddress_list", None)
        if handler is not None:
            handler(self)
def address_list(self):
    """Parse rule ``address_list``.

    Alternative 1: an address followed by zero or more ',' address pairs.
    Alternative 2: the obsolete obs_addr_list form.
    """
    localctx = sdpParser.Address_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 236, self.RULE_address_list)
    self._la = 0 # Token type
    try:
        self.state = 1432
        self._errHandler.sync(self)
        # Decision 168 selects between the modern list and the obsolete form.
        la_ = self._interp.adaptivePredict(self._input,168,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1423
            self.address()
            self.state = 1428
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Greedily consume ',' address repetitions.
            while _la==sdpParser.COMMA:
                self.state = 1424
                self.match(sdpParser.COMMA)
                self.state = 1425
                self.address()
                self.state = 1430
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1431
            self.obs_addr_list()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Group_listContext(ParserRuleContext):
    """Parse-tree context for ``group_list``: mailbox_list, cfws, or obs_group_list."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # One accessor per alternative.
    def mailbox_list(self): return self.getTypedRuleContext(sdpParser.Mailbox_listContext, 0)
    def cfws(self): return self.getTypedRuleContext(sdpParser.CfwsContext, 0)
    def obs_group_list(self): return self.getTypedRuleContext(sdpParser.Obs_group_listContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_group_list

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterGroup_list", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitGroup_list", None)
        if handler is not None:
            handler(self)
def group_list(self):
    """Parse rule ``group_list``: one of mailbox_list, cfws, or obs_group_list."""

    localctx = sdpParser.Group_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 238, self.RULE_group_list)
    try:
        self.state = 1437
        self._errHandler.sync(self)
        # Decision 169 picks which of the three alternatives applies.
        la_ = self._interp.adaptivePredict(self._input,169,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1434
            self.mailbox_list()
            pass

        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1435
            self.cfws()
            pass

        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1436
            self.obs_group_list()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AlphaContext(ParserRuleContext):
    """Parse-tree context for ``alpha``: a single letter token, uppercase or lowercase."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Uppercase letter token accessors.
    def CAP_A(self): return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self): return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self): return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self): return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self): return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self): return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self): return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self): return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self): return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self): return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self): return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self): return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self): return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self): return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self): return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self): return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self): return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self): return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self): return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self): return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self): return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self): return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self): return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self): return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self): return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self): return self.getToken(sdpParser.CAP_Z, 0)

    # Lowercase letter token accessors.
    def A(self): return self.getToken(sdpParser.A, 0)
    def B(self): return self.getToken(sdpParser.B, 0)
    def C(self): return self.getToken(sdpParser.C, 0)
    def D(self): return self.getToken(sdpParser.D, 0)
    def E(self): return self.getToken(sdpParser.E, 0)
    def F(self): return self.getToken(sdpParser.F, 0)
    def G(self): return self.getToken(sdpParser.G, 0)
    def H(self): return self.getToken(sdpParser.H, 0)
    def I(self): return self.getToken(sdpParser.I, 0)
    def J(self): return self.getToken(sdpParser.J, 0)
    def K(self): return self.getToken(sdpParser.K, 0)
    def L(self): return self.getToken(sdpParser.L, 0)
    def M(self): return self.getToken(sdpParser.M, 0)
    def N(self): return self.getToken(sdpParser.N, 0)
    def O(self): return self.getToken(sdpParser.O, 0)
    def P(self): return self.getToken(sdpParser.P, 0)
    def Q(self): return self.getToken(sdpParser.Q, 0)
    def R(self): return self.getToken(sdpParser.R, 0)
    def S(self): return self.getToken(sdpParser.S, 0)
    def T(self): return self.getToken(sdpParser.T, 0)
    def U(self): return self.getToken(sdpParser.U, 0)
    def V(self): return self.getToken(sdpParser.V, 0)
    def W(self): return self.getToken(sdpParser.W, 0)
    def X(self): return self.getToken(sdpParser.X, 0)
    def Y(self): return self.getToken(sdpParser.Y, 0)
    def Z(self): return self.getToken(sdpParser.Z, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_alpha

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterAlpha", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitAlpha", None)
        if handler is not None:
            handler(self)
def alpha(self):
    """Parse rule ``alpha``: consume one letter token.

    Alternative 1 matches any uppercase token (CAP_A..CAP_Z);
    alternative 2 matches any lowercase token (A..Z).
    """
    localctx = sdpParser.AlphaContext(self, self._ctx, self.state)
    self.enterRule(localctx, 240, self.RULE_alpha)
    self._la = 0 # Token type
    try:
        self.state = 1441
        self._errHandler.sync(self)
        # One token of lookahead is enough to pick the alternative.
        token = self._input.LA(1)
        if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1439
            _la = self._input.LA(1)
            # Set-match: accept any uppercase letter token, else recover inline.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass

        elif token in [sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1440
            _la = self._input.LA(1)
            # Set-match for lowercase letters; token values offset by 69 into one 64-bit word.
            if not(((((_la - 69)) & ~0x3f) == 0 and ((1 << (_la - 69)) & ((1 << (sdpParser.A - 69)) | (1 << (sdpParser.B - 69)) | (1 << (sdpParser.C - 69)) | (1 << (sdpParser.D - 69)) | (1 << (sdpParser.E - 69)) | (1 << (sdpParser.F - 69)) | (1 << (sdpParser.G - 69)) | (1 << (sdpParser.H - 69)) | (1 << (sdpParser.I - 69)) | (1 << (sdpParser.J - 69)) | (1 << (sdpParser.K - 69)) | (1 << (sdpParser.L - 69)) | (1 << (sdpParser.M - 69)) | (1 << (sdpParser.N - 69)) | (1 << (sdpParser.O - 69)) | (1 << (sdpParser.P - 69)) | (1 << (sdpParser.Q - 69)) | (1 << (sdpParser.R - 69)) | (1 << (sdpParser.S - 69)) | (1 << (sdpParser.T - 69)) | (1 << (sdpParser.U - 69)) | (1 << (sdpParser.V - 69)) | (1 << (sdpParser.W - 69)) | (1 << (sdpParser.X - 69)) | (1 << (sdpParser.Y - 69)) | (1 << (sdpParser.Z - 69)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass

        else:
            raise NoViableAltException(self)

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class BitContext(ParserRuleContext):
    """Parse-tree context for ``bit``: a single ZERO or ONE token."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ZERO(self): return self.getToken(sdpParser.ZERO, 0)
    def ONE(self): return self.getToken(sdpParser.ONE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_bit

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterBit", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitBit", None)
        if handler is not None:
            handler(self)
def bit(self):
    """Parse rule ``bit``: consume one token that is either ZERO or ONE."""

    localctx = sdpParser.BitContext(self, self._ctx, self.state)
    self.enterRule(localctx, 242, self.RULE_bit)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1443
        _la = self._input.LA(1)
        # Set-match: accept ZERO or ONE, otherwise attempt single-token recovery.
        if not(_la==sdpParser.ZERO or _la==sdpParser.ONE):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Char_1Context(ParserRuleContext):
    """Parse-tree context for ``char_1``: any single token from a wide character set
    (control characters U+0001-U+001F, printable ASCII, and U+007F)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Control-character token accessors (U+0001 .. U+001F).
    def U_0001(self): return self.getToken(sdpParser.U_0001, 0)
    def U_0002(self): return self.getToken(sdpParser.U_0002, 0)
    def U_0003(self): return self.getToken(sdpParser.U_0003, 0)
    def U_0004(self): return self.getToken(sdpParser.U_0004, 0)
    def U_0005(self): return self.getToken(sdpParser.U_0005, 0)
    def U_0006(self): return self.getToken(sdpParser.U_0006, 0)
    def U_0007(self): return self.getToken(sdpParser.U_0007, 0)
    def U_0008(self): return self.getToken(sdpParser.U_0008, 0)
    def TAB(self): return self.getToken(sdpParser.TAB, 0)
    def LF(self): return self.getToken(sdpParser.LF, 0)
    def U_000B(self): return self.getToken(sdpParser.U_000B, 0)
    def U_000C(self): return self.getToken(sdpParser.U_000C, 0)
    def CR(self): return self.getToken(sdpParser.CR, 0)
    def U_000E(self): return self.getToken(sdpParser.U_000E, 0)
    def U_000F(self): return self.getToken(sdpParser.U_000F, 0)
    def U_0010(self): return self.getToken(sdpParser.U_0010, 0)
    def U_0011(self): return self.getToken(sdpParser.U_0011, 0)
    def U_0012(self): return self.getToken(sdpParser.U_0012, 0)
    def U_0013(self): return self.getToken(sdpParser.U_0013, 0)
    def U_0014(self): return self.getToken(sdpParser.U_0014, 0)
    def U_0015(self): return self.getToken(sdpParser.U_0015, 0)
    def U_0016(self): return self.getToken(sdpParser.U_0016, 0)
    def U_0017(self): return self.getToken(sdpParser.U_0017, 0)
    def U_0018(self): return self.getToken(sdpParser.U_0018, 0)
    def U_0019(self): return self.getToken(sdpParser.U_0019, 0)
    def U_001A(self): return self.getToken(sdpParser.U_001A, 0)
    def U_001B(self): return self.getToken(sdpParser.U_001B, 0)
    def U_001C(self): return self.getToken(sdpParser.U_001C, 0)
    def U_001D(self): return self.getToken(sdpParser.U_001D, 0)
    def U_001E(self): return self.getToken(sdpParser.U_001E, 0)
    def U_001F(self): return self.getToken(sdpParser.U_001F, 0)

    # Printable ASCII token accessors.
    def SPACE(self): return self.getToken(sdpParser.SPACE, 0)
    def EXCLAMATION(self): return self.getToken(sdpParser.EXCLAMATION, 0)
    def QUOTE(self): return self.getToken(sdpParser.QUOTE, 0)
    def HASH(self): return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self): return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self): return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self): return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self): return self.getToken(sdpParser.APOSTROPHE, 0)
    def LEFT_PAREN(self): return self.getToken(sdpParser.LEFT_PAREN, 0)
    def RIGHT_PAREN(self): return self.getToken(sdpParser.RIGHT_PAREN, 0)
    def ASTERISK(self): return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self): return self.getToken(sdpParser.PLUS, 0)
    def COMMA(self): return self.getToken(sdpParser.COMMA, 0)
    def DASH(self): return self.getToken(sdpParser.DASH, 0)
    def PERIOD(self): return self.getToken(sdpParser.PERIOD, 0)
    def SLASH(self): return self.getToken(sdpParser.SLASH, 0)
    def ZERO(self): return self.getToken(sdpParser.ZERO, 0)
    def ONE(self): return self.getToken(sdpParser.ONE, 0)
    def TWO(self): return self.getToken(sdpParser.TWO, 0)
    def THREE(self): return self.getToken(sdpParser.THREE, 0)
    def FOUR(self): return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self): return self.getToken(sdpParser.FIVE, 0)
    def SIX(self): return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self): return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self): return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self): return self.getToken(sdpParser.NINE, 0)
    def COLON(self): return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self): return self.getToken(sdpParser.SEMICOLON, 0)
    def LESS_THAN(self): return self.getToken(sdpParser.LESS_THAN, 0)
    def EQUALS(self): return self.getToken(sdpParser.EQUALS, 0)
    def GREATER_THAN(self): return self.getToken(sdpParser.GREATER_THAN, 0)
    def QUESTION(self): return self.getToken(sdpParser.QUESTION, 0)
    def AT(self): return self.getToken(sdpParser.AT, 0)
    def CAP_A(self): return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self): return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self): return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self): return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self): return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self): return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self): return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self): return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self): return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self): return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self): return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self): return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self): return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self): return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self): return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self): return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self): return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self): return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self): return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self): return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self): return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self): return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self): return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self): return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self): return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self): return self.getToken(sdpParser.CAP_Z, 0)
    def LEFT_BRACE(self): return self.getToken(sdpParser.LEFT_BRACE, 0)
    def BACKSLASH(self): return self.getToken(sdpParser.BACKSLASH, 0)
    def RIGHT_BRACE(self): return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def CARAT(self): return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self): return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self): return self.getToken(sdpParser.ACCENT, 0)
    def A(self): return self.getToken(sdpParser.A, 0)
    def B(self): return self.getToken(sdpParser.B, 0)
    def C(self): return self.getToken(sdpParser.C, 0)
    def D(self): return self.getToken(sdpParser.D, 0)
    def E(self): return self.getToken(sdpParser.E, 0)
    def F(self): return self.getToken(sdpParser.F, 0)
    def G(self): return self.getToken(sdpParser.G, 0)
    def H(self): return self.getToken(sdpParser.H, 0)
    def I(self): return self.getToken(sdpParser.I, 0)
    def J(self): return self.getToken(sdpParser.J, 0)
    def K(self): return self.getToken(sdpParser.K, 0)
    def L(self): return self.getToken(sdpParser.L, 0)
    def M(self): return self.getToken(sdpParser.M, 0)
    def N(self): return self.getToken(sdpParser.N, 0)
    def O(self): return self.getToken(sdpParser.O, 0)
    def P(self): return self.getToken(sdpParser.P, 0)
    def Q(self): return self.getToken(sdpParser.Q, 0)
    def R(self): return self.getToken(sdpParser.R, 0)
    def S(self): return self.getToken(sdpParser.S, 0)
    def T(self): return self.getToken(sdpParser.T, 0)
    def U(self): return self.getToken(sdpParser.U, 0)
    def V(self): return self.getToken(sdpParser.V, 0)
    def W(self): return self.getToken(sdpParser.W, 0)
    def X(self): return self.getToken(sdpParser.X, 0)
    def Y(self): return self.getToken(sdpParser.Y, 0)
    def Z(self): return self.getToken(sdpParser.Z, 0)
    def LEFT_CURLY_BRACE(self): return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self): return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self): return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self): return self.getToken(sdpParser.TILDE, 0)
    def U_007F(self): return self.getToken(sdpParser.U_007F, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_char_1

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterChar_1", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitChar_1", None)
        if handler is not None:
            handler(self)
def char_1(self):
    """Parse rule ``char_1``: consume one token from the wide character-token set
    (controls U+0001-U+001F, printable ASCII, and U+007F)."""

    localctx = sdpParser.Char_1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 244, self.RULE_char_1)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1445
        _la = self._input.LA(1)
        # Set-match against two 64-bit words (token values offset by 1 and by 65).
        if not(((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.LF - 1)) | (1 << (sdpParser.CR - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.LEFT_PAREN - 1)) | (1 << (sdpParser.RIGHT_PAREN - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.LESS_THAN - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.GREATER_THAN - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << (sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << (sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) | (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CrContext(ParserRuleContext):
    """Parse-tree context for ``cr``: a single carriage-return token."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def CR(self): return self.getToken(sdpParser.CR, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_cr

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterCr", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitCr", None)
        if handler is not None:
            handler(self)
def cr(self):
    """Parse the `cr` rule: match exactly one CR token."""
    ctx = sdpParser.CrContext(self, self._ctx, self.state)
    self.enterRule(ctx, 246, self.RULE_cr)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1447
        self.match(sdpParser.CR)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class CrlfContext(ParserRuleContext):
    """Parse-tree context for the `crlf` rule (a `cr` followed by an `lf`)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def cr(self):
        # Child context for the `cr` sub-rule.
        return self.getTypedRuleContext(sdpParser.CrContext,0)

    def lf(self):
        # Child context for the `lf` sub-rule.
        return self.getTypedRuleContext(sdpParser.LfContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_crlf

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCrlf" ):
            listener.enterCrlf(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCrlf" ):
            listener.exitCrlf(self)
def crlf(self):
    """Parse the `crlf` rule: a `cr` sub-rule immediately followed by `lf`."""
    ctx = sdpParser.CrlfContext(self, self._ctx, self.state)
    self.enterRule(ctx, 248, self.RULE_crlf)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1449
        self.cr()
        self.state = 1450
        self.lf()
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class CtlContext(ParserRuleContext):
    """Parse-tree context for the `ctl` rule (ASCII control characters).

    One accessor per control-character terminal that the rule may match:
    U+0000..U+0008, TAB, LF, U+000B, U+000C, CR, U+000E..U+001F, and U+007F.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def U_0000(self):
        return self.getToken(sdpParser.U_0000, 0)

    def U_0001(self):
        return self.getToken(sdpParser.U_0001, 0)

    def U_0002(self):
        return self.getToken(sdpParser.U_0002, 0)

    def U_0003(self):
        return self.getToken(sdpParser.U_0003, 0)

    def U_0004(self):
        return self.getToken(sdpParser.U_0004, 0)

    def U_0005(self):
        return self.getToken(sdpParser.U_0005, 0)

    def U_0006(self):
        return self.getToken(sdpParser.U_0006, 0)

    def U_0007(self):
        return self.getToken(sdpParser.U_0007, 0)

    def U_0008(self):
        return self.getToken(sdpParser.U_0008, 0)

    def TAB(self):
        return self.getToken(sdpParser.TAB, 0)

    def LF(self):
        return self.getToken(sdpParser.LF, 0)

    def U_000B(self):
        return self.getToken(sdpParser.U_000B, 0)

    def U_000C(self):
        return self.getToken(sdpParser.U_000C, 0)

    def CR(self):
        return self.getToken(sdpParser.CR, 0)

    def U_000E(self):
        return self.getToken(sdpParser.U_000E, 0)

    def U_000F(self):
        return self.getToken(sdpParser.U_000F, 0)

    def U_0010(self):
        return self.getToken(sdpParser.U_0010, 0)

    def U_0011(self):
        return self.getToken(sdpParser.U_0011, 0)

    def U_0012(self):
        return self.getToken(sdpParser.U_0012, 0)

    def U_0013(self):
        return self.getToken(sdpParser.U_0013, 0)

    def U_0014(self):
        return self.getToken(sdpParser.U_0014, 0)

    def U_0015(self):
        return self.getToken(sdpParser.U_0015, 0)

    def U_0016(self):
        return self.getToken(sdpParser.U_0016, 0)

    def U_0017(self):
        return self.getToken(sdpParser.U_0017, 0)

    def U_0018(self):
        return self.getToken(sdpParser.U_0018, 0)

    def U_0019(self):
        return self.getToken(sdpParser.U_0019, 0)

    def U_001A(self):
        return self.getToken(sdpParser.U_001A, 0)

    def U_001B(self):
        return self.getToken(sdpParser.U_001B, 0)

    def U_001C(self):
        return self.getToken(sdpParser.U_001C, 0)

    def U_001D(self):
        return self.getToken(sdpParser.U_001D, 0)

    def U_001E(self):
        return self.getToken(sdpParser.U_001E, 0)

    def U_001F(self):
        return self.getToken(sdpParser.U_001F, 0)

    def U_007F(self):
        return self.getToken(sdpParser.U_007F, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ctl

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCtl" ):
            listener.enterCtl(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCtl" ):
            listener.exitCtl(self)
def ctl(self):
    """Parse the `ctl` rule: one ASCII control character.

    Alternative 1 matches any control token in the set
    {TAB, LF, CR, U+0000..U+0008, U+000B, U+000C, U+000E..U+001F};
    alternative 2 matches DEL (U+007F). State numbers and the bitset
    tests below are generated from the serialized ATN — do not edit.
    """
    localctx = sdpParser.CtlContext(self, self._ctx, self.state)
    self.enterRule(localctx, 250, self.RULE_ctl)
    self._la = 0 # Token type
    try:
        self.state = 1454
        self._errHandler.sync(self)
        # One-token lookahead selects between the two alternatives.
        token = self._input.LA(1)
        if token in [sdpParser.TAB, sdpParser.LF, sdpParser.CR, sdpParser.U_0000, sdpParser.U_0001, sdpParser.U_0002, sdpParser.U_0003, sdpParser.U_0004, sdpParser.U_0005, sdpParser.U_0006, sdpParser.U_0007, sdpParser.U_0008, sdpParser.U_000B, sdpParser.U_000C, sdpParser.U_000E, sdpParser.U_000F, sdpParser.U_0010, sdpParser.U_0011, sdpParser.U_0012, sdpParser.U_0013, sdpParser.U_0014, sdpParser.U_0015, sdpParser.U_0016, sdpParser.U_0017, sdpParser.U_0018, sdpParser.U_0019, sdpParser.U_001A, sdpParser.U_001B, sdpParser.U_001C, sdpParser.U_001D, sdpParser.U_001E, sdpParser.U_001F]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1452
            _la = self._input.LA(1)
            # Generated 64-bit bitset membership test for the control-token set.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.LF) | (1 << sdpParser.CR))) != 0) or ((((_la - 99)) & ~0x3f) == 0 and ((1 << (_la - 99)) & ((1 << (sdpParser.U_0000 - 99)) | (1 << (sdpParser.U_0001 - 99)) | (1 << (sdpParser.U_0002 - 99)) | (1 << (sdpParser.U_0003 - 99)) | (1 << (sdpParser.U_0004 - 99)) | (1 << (sdpParser.U_0005 - 99)) | (1 << (sdpParser.U_0006 - 99)) | (1 << (sdpParser.U_0007 - 99)) | (1 << (sdpParser.U_0008 - 99)) | (1 << (sdpParser.U_000B - 99)) | (1 << (sdpParser.U_000C - 99)) | (1 << (sdpParser.U_000E - 99)) | (1 << (sdpParser.U_000F - 99)) | (1 << (sdpParser.U_0010 - 99)) | (1 << (sdpParser.U_0011 - 99)) | (1 << (sdpParser.U_0012 - 99)) | (1 << (sdpParser.U_0013 - 99)) | (1 << (sdpParser.U_0014 - 99)) | (1 << (sdpParser.U_0015 - 99)) | (1 << (sdpParser.U_0016 - 99)) | (1 << (sdpParser.U_0017 - 99)) | (1 << (sdpParser.U_0018 - 99)) | (1 << (sdpParser.U_0019 - 99)) | (1 << (sdpParser.U_001A - 99)) | (1 << (sdpParser.U_001B - 99)) | (1 << (sdpParser.U_001C - 99)) | (1 << (sdpParser.U_001D - 99)) | (1 << (sdpParser.U_001E - 99)) | (1 << (sdpParser.U_001F - 99)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            pass
        elif token in [sdpParser.U_007F]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1453
            self.match(sdpParser.U_007F)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DigitContext(ParserRuleContext):
    """Parse-tree context for the `digit` rule (one decimal digit token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)

    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)

    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)

    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)

    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)

    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)

    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)

    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)

    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_digit

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDigit" ):
            listener.enterDigit(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDigit" ):
            listener.exitDigit(self)
def digit(self):
    """Parse the `digit` rule: one decimal digit token (ZERO through NINE)."""
    ctx = sdpParser.DigitContext(self, self._ctx, self.state)
    self.enterRule(ctx, 252, self.RULE_digit)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1456
        lookahead = self._input.LA(1)
        # 64-bit bitset over the ten digit token types.
        digit_mask = ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))
        if (lookahead & ~0x3f) == 0 and ((1 << lookahead) & digit_mask) != 0:
            self._errHandler.reportMatch(self)
            self.consume()
        else:
            self._errHandler.recoverInline(self)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class DquoteContext(ParserRuleContext):
    """Parse-tree context for the `dquote` rule (a single QUOTE token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def QUOTE(self):
        # Accessor for the QUOTE terminal matched by this rule.
        return self.getToken(sdpParser.QUOTE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_dquote

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDquote" ):
            listener.enterDquote(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDquote" ):
            listener.exitDquote(self)
def dquote(self):
    """Parse the `dquote` rule: match exactly one QUOTE token."""
    ctx = sdpParser.DquoteContext(self, self._ctx, self.state)
    self.enterRule(ctx, 254, self.RULE_dquote)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1458
        self.match(sdpParser.QUOTE)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class HexdigContext(ParserRuleContext):
    """Parse-tree context for the `hexdig` rule (a hexadecimal digit).

    Matches either a `digit` sub-rule or one of the letter tokens A-F in
    upper or lower case.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self):
        # Child context when the hex digit is a decimal digit.
        return self.getTypedRuleContext(sdpParser.DigitContext,0)

    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)

    def A(self):
        return self.getToken(sdpParser.A, 0)

    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)

    def B(self):
        return self.getToken(sdpParser.B, 0)

    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)

    def C(self):
        return self.getToken(sdpParser.C, 0)

    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)

    def D(self):
        return self.getToken(sdpParser.D, 0)

    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)

    def E(self):
        return self.getToken(sdpParser.E, 0)

    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)

    def F(self):
        return self.getToken(sdpParser.F, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_hexdig

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterHexdig" ):
            listener.enterHexdig(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitHexdig" ):
            listener.exitHexdig(self)
def hexdig(self):
    """Parse the `hexdig` rule: a decimal digit, or a hex letter A-F in either case.

    Alternative 1 delegates to the `digit` sub-rule; alternatives 2-7 each
    match one upper/lower-case letter pair. The alternative numbers and ATN
    state numbers are preserved exactly from the generated parser.
    """
    ctx = sdpParser.HexdigContext(self, self._ctx, self.state)
    self.enterRule(ctx, 256, self.RULE_hexdig)
    self._la = 0 # Token type
    try:
        self.state = 1467
        self._errHandler.sync(self)
        tok = self._input.LA(1)
        decimal_digits = (sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE)
        # (upper-case token, lower-case token, alternative number, ATN state)
        letter_alts = (
            (sdpParser.CAP_A, sdpParser.A, 2, 1461),
            (sdpParser.CAP_B, sdpParser.B, 3, 1462),
            (sdpParser.CAP_C, sdpParser.C, 4, 1463),
            (sdpParser.CAP_D, sdpParser.D, 5, 1464),
            (sdpParser.CAP_E, sdpParser.E, 6, 1465),
            (sdpParser.CAP_F, sdpParser.F, 7, 1466),
        )
        if tok in decimal_digits:
            self.enterOuterAlt(ctx, 1)
            self.state = 1460
            self.digit()
        else:
            for upper, lower, alt, atn_state in letter_alts:
                if tok == upper or tok == lower:
                    self.enterOuterAlt(ctx, alt)
                    self.state = atn_state
                    _la = self._input.LA(1)
                    if _la == upper or _la == lower:
                        self._errHandler.reportMatch(self)
                        self.consume()
                    else:
                        self._errHandler.recoverInline(self)
                    break
            else:
                # Lookahead starts none of the seven alternatives.
                raise NoViableAltException(self)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class HtabContext(ParserRuleContext):
    """Parse-tree context for the `htab` rule (a single TAB token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def TAB(self):
        # Accessor for the TAB terminal matched by this rule.
        return self.getToken(sdpParser.TAB, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_htab

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterHtab" ):
            listener.enterHtab(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitHtab" ):
            listener.exitHtab(self)
def htab(self):
    """Parse the `htab` rule: match exactly one TAB token."""
    ctx = sdpParser.HtabContext(self, self._ctx, self.state)
    self.enterRule(ctx, 258, self.RULE_htab)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1469
        self.match(sdpParser.TAB)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class LfContext(ParserRuleContext):
    """Parse-tree context for the `lf` rule (a single LF token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LF(self):
        # Accessor for the LF terminal matched by this rule.
        return self.getToken(sdpParser.LF, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_lf

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLf" ):
            listener.enterLf(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLf" ):
            listener.exitLf(self)
def lf(self):
    """Parse the `lf` rule: match exactly one LF token."""
    ctx = sdpParser.LfContext(self, self._ctx, self.state)
    self.enterRule(ctx, 260, self.RULE_lf)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 1471
        self.match(sdpParser.LF)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class LwspContext(ParserRuleContext):
    """Parse-tree context for the `lwsp` rule (linear whitespace).

    Holds zero or more `wsp` and `crlf` child contexts, accessed either
    all at once (i is None) or individually by index.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def wsp(self, i:int=None):
        # All `wsp` children when i is None, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.WspContext)
        else:
            return self.getTypedRuleContext(sdpParser.WspContext,i)

    def crlf(self, i:int=None):
        # All `crlf` children when i is None, otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.CrlfContext)
        else:
            return self.getTypedRuleContext(sdpParser.CrlfContext,i)

    def getRuleIndex(self):
        return sdpParser.RULE_lwsp

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterLwsp" ):
            listener.enterLwsp(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitLwsp" ):
            listener.exitLwsp(self)
def lwsp(self):
    """Parse the `lwsp` rule: zero or more of (`wsp` | `crlf` `wsp`).

    The loop continues while the lookahead token can begin another
    whitespace element (TAB/SPACE start `wsp`; CR starts `crlf wsp`).
    State numbers and sync-call placement are generated from the
    serialized ATN — do not edit.
    """
    localctx = sdpParser.LwspContext(self, self._ctx, self.state)
    self.enterRule(localctx, 262, self.RULE_lwsp)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1479
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Generated bitset test: loop while lookahead is TAB, CR, or SPACE.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.TAB) | (1 << sdpParser.CR) | (1 << sdpParser.SPACE))) != 0):
            self.state = 1477
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.TAB, sdpParser.SPACE]:
                self.state = 1473
                self.wsp()
                pass
            elif token in [sdpParser.CR]:
                self.state = 1474
                self.crlf()
                self.state = 1475
                self.wsp()
                pass
            else:
                raise NoViableAltException(self)
            self.state = 1481
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OctetContext(ParserRuleContext):
    """Parse-tree context for the `octet` rule (any single byte, 0x00-0xFF).

    One generated accessor per terminal the rule may match, covering the
    full byte range: control characters, printable ASCII, DEL, and the
    high bytes U+0080..U+00FF.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- Generated terminal accessors (one per token type) ---

    def U_0000(self):
        return self.getToken(sdpParser.U_0000, 0)

    def U_0001(self):
        return self.getToken(sdpParser.U_0001, 0)

    def U_0002(self):
        return self.getToken(sdpParser.U_0002, 0)

    def U_0003(self):
        return self.getToken(sdpParser.U_0003, 0)

    def U_0004(self):
        return self.getToken(sdpParser.U_0004, 0)

    def U_0005(self):
        return self.getToken(sdpParser.U_0005, 0)

    def U_0006(self):
        return self.getToken(sdpParser.U_0006, 0)

    def U_0007(self):
        return self.getToken(sdpParser.U_0007, 0)

    def U_0008(self):
        return self.getToken(sdpParser.U_0008, 0)

    def TAB(self):
        return self.getToken(sdpParser.TAB, 0)

    def LF(self):
        return self.getToken(sdpParser.LF, 0)

    def U_000B(self):
        return self.getToken(sdpParser.U_000B, 0)

    def U_000C(self):
        return self.getToken(sdpParser.U_000C, 0)

    def CR(self):
        return self.getToken(sdpParser.CR, 0)

    def U_000E(self):
        return self.getToken(sdpParser.U_000E, 0)

    def U_000F(self):
        return self.getToken(sdpParser.U_000F, 0)

    def U_0010(self):
        return self.getToken(sdpParser.U_0010, 0)

    def U_0011(self):
        return self.getToken(sdpParser.U_0011, 0)

    def U_0012(self):
        return self.getToken(sdpParser.U_0012, 0)

    def U_0013(self):
        return self.getToken(sdpParser.U_0013, 0)

    def U_0014(self):
        return self.getToken(sdpParser.U_0014, 0)

    def U_0015(self):
        return self.getToken(sdpParser.U_0015, 0)

    def U_0016(self):
        return self.getToken(sdpParser.U_0016, 0)

    def U_0017(self):
        return self.getToken(sdpParser.U_0017, 0)

    def U_0018(self):
        return self.getToken(sdpParser.U_0018, 0)

    def U_0019(self):
        return self.getToken(sdpParser.U_0019, 0)

    def U_001A(self):
        return self.getToken(sdpParser.U_001A, 0)

    def U_001B(self):
        return self.getToken(sdpParser.U_001B, 0)

    def U_001C(self):
        return self.getToken(sdpParser.U_001C, 0)

    def U_001D(self):
        return self.getToken(sdpParser.U_001D, 0)

    def U_001E(self):
        return self.getToken(sdpParser.U_001E, 0)

    def U_001F(self):
        return self.getToken(sdpParser.U_001F, 0)

    def SPACE(self):
        return self.getToken(sdpParser.SPACE, 0)

    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)

    def QUOTE(self):
        return self.getToken(sdpParser.QUOTE, 0)

    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)

    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)

    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)

    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)

    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)

    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)

    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)

    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)

    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)

    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)

    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)

    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)

    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)

    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)

    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)

    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)

    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)

    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)

    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)

    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)

    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)

    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)

    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)

    def AT(self):
        return self.getToken(sdpParser.AT, 0)

    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)

    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)

    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)

    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)

    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)

    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)

    def CAP_G(self):
        return self.getToken(sdpParser.CAP_G, 0)

    def CAP_H(self):
        return self.getToken(sdpParser.CAP_H, 0)

    def CAP_I(self):
        return self.getToken(sdpParser.CAP_I, 0)

    def CAP_J(self):
        return self.getToken(sdpParser.CAP_J, 0)

    def CAP_K(self):
        return self.getToken(sdpParser.CAP_K, 0)

    def CAP_L(self):
        return self.getToken(sdpParser.CAP_L, 0)

    def CAP_M(self):
        return self.getToken(sdpParser.CAP_M, 0)

    def CAP_N(self):
        return self.getToken(sdpParser.CAP_N, 0)

    def CAP_O(self):
        return self.getToken(sdpParser.CAP_O, 0)

    def CAP_P(self):
        return self.getToken(sdpParser.CAP_P, 0)

    def CAP_Q(self):
        return self.getToken(sdpParser.CAP_Q, 0)

    def CAP_R(self):
        return self.getToken(sdpParser.CAP_R, 0)

    def CAP_S(self):
        return self.getToken(sdpParser.CAP_S, 0)

    def CAP_T(self):
        return self.getToken(sdpParser.CAP_T, 0)

    def CAP_U(self):
        return self.getToken(sdpParser.CAP_U, 0)

    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)

    def CAP_W(self):
        return self.getToken(sdpParser.CAP_W, 0)

    def CAP_X(self):
        return self.getToken(sdpParser.CAP_X, 0)

    def CAP_Y(self):
        return self.getToken(sdpParser.CAP_Y, 0)

    def CAP_Z(self):
        return self.getToken(sdpParser.CAP_Z, 0)

    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)

    def BACKSLASH(self):
        return self.getToken(sdpParser.BACKSLASH, 0)

    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)

    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)

    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)

    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)

    def A(self):
        return self.getToken(sdpParser.A, 0)

    def B(self):
        return self.getToken(sdpParser.B, 0)

    def C(self):
        return self.getToken(sdpParser.C, 0)

    def D(self):
        return self.getToken(sdpParser.D, 0)

    def E(self):
        return self.getToken(sdpParser.E, 0)

    def F(self):
        return self.getToken(sdpParser.F, 0)

    def G(self):
        return self.getToken(sdpParser.G, 0)

    def H(self):
        return self.getToken(sdpParser.H, 0)

    def I(self):
        return self.getToken(sdpParser.I, 0)

    def J(self):
        return self.getToken(sdpParser.J, 0)

    def K(self):
        return self.getToken(sdpParser.K, 0)

    def L(self):
        return self.getToken(sdpParser.L, 0)

    def M(self):
        return self.getToken(sdpParser.M, 0)

    def N(self):
        return self.getToken(sdpParser.N, 0)

    def O(self):
        return self.getToken(sdpParser.O, 0)

    def P(self):
        return self.getToken(sdpParser.P, 0)

    def Q(self):
        return self.getToken(sdpParser.Q, 0)

    def R(self):
        return self.getToken(sdpParser.R, 0)

    def S(self):
        return self.getToken(sdpParser.S, 0)

    def T(self):
        return self.getToken(sdpParser.T, 0)

    def U(self):
        return self.getToken(sdpParser.U, 0)

    def V(self):
        return self.getToken(sdpParser.V, 0)

    def W(self):
        return self.getToken(sdpParser.W, 0)

    def X(self):
        return self.getToken(sdpParser.X, 0)

    def Y(self):
        return self.getToken(sdpParser.Y, 0)

    def Z(self):
        return self.getToken(sdpParser.Z, 0)

    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)

    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)

    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)

    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)

    def U_007F(self):
        return self.getToken(sdpParser.U_007F, 0)

    def U_0080(self):
        return self.getToken(sdpParser.U_0080, 0)

    def U_0081(self):
        return self.getToken(sdpParser.U_0081, 0)

    def U_0082(self):
        return self.getToken(sdpParser.U_0082, 0)

    def U_0083(self):
        return self.getToken(sdpParser.U_0083, 0)

    def U_0084(self):
        return self.getToken(sdpParser.U_0084, 0)

    def U_0085(self):
        return self.getToken(sdpParser.U_0085, 0)

    def U_0086(self):
        return self.getToken(sdpParser.U_0086, 0)

    def U_0087(self):
        return self.getToken(sdpParser.U_0087, 0)

    def U_0088(self):
        return self.getToken(sdpParser.U_0088, 0)

    def U_0089(self):
        return self.getToken(sdpParser.U_0089, 0)

    def U_008A(self):
        return self.getToken(sdpParser.U_008A, 0)

    def U_008B(self):
        return self.getToken(sdpParser.U_008B, 0)

    def U_008C(self):
        return self.getToken(sdpParser.U_008C, 0)

    def U_008D(self):
        return self.getToken(sdpParser.U_008D, 0)

    def U_008E(self):
        return self.getToken(sdpParser.U_008E, 0)

    def U_008F(self):
        return self.getToken(sdpParser.U_008F, 0)

    def U_0090(self):
        return self.getToken(sdpParser.U_0090, 0)

    def U_0091(self):
        return self.getToken(sdpParser.U_0091, 0)

    def U_0092(self):
        return self.getToken(sdpParser.U_0092, 0)

    def U_0093(self):
        return self.getToken(sdpParser.U_0093, 0)

    def U_0094(self):
        return self.getToken(sdpParser.U_0094, 0)

    def U_0095(self):
        return self.getToken(sdpParser.U_0095, 0)

    def U_0096(self):
        return self.getToken(sdpParser.U_0096, 0)

    def U_0097(self):
        return self.getToken(sdpParser.U_0097, 0)

    def U_0098(self):
        return self.getToken(sdpParser.U_0098, 0)

    def U_0099(self):
        return self.getToken(sdpParser.U_0099, 0)

    def U_009A(self):
        return self.getToken(sdpParser.U_009A, 0)

    def U_009B(self):
        return self.getToken(sdpParser.U_009B, 0)

    def U_009C(self):
        return self.getToken(sdpParser.U_009C, 0)

    def U_009D(self):
        return self.getToken(sdpParser.U_009D, 0)

    def U_009E(self):
        return self.getToken(sdpParser.U_009E, 0)

    def U_009F(self):
        return self.getToken(sdpParser.U_009F, 0)

    def U_00A0(self):
        return self.getToken(sdpParser.U_00A0, 0)

    def U_00A1(self):
        return self.getToken(sdpParser.U_00A1, 0)

    def U_00A2(self):
        return self.getToken(sdpParser.U_00A2, 0)

    def U_00A3(self):
        return self.getToken(sdpParser.U_00A3, 0)

    def U_00A4(self):
        return self.getToken(sdpParser.U_00A4, 0)

    def U_00A5(self):
        return self.getToken(sdpParser.U_00A5, 0)

    def U_00A6(self):
        return self.getToken(sdpParser.U_00A6, 0)

    def U_00A7(self):
        return self.getToken(sdpParser.U_00A7, 0)

    def U_00A8(self):
        return self.getToken(sdpParser.U_00A8, 0)

    def U_00A9(self):
        return self.getToken(sdpParser.U_00A9, 0)

    def U_00AA(self):
        return self.getToken(sdpParser.U_00AA, 0)

    def U_00AB(self):
        return self.getToken(sdpParser.U_00AB, 0)

    def U_00AC(self):
        return self.getToken(sdpParser.U_00AC, 0)

    def U_00AD(self):
        return self.getToken(sdpParser.U_00AD, 0)

    def U_00AE(self):
        return self.getToken(sdpParser.U_00AE, 0)

    def U_00AF(self):
        return self.getToken(sdpParser.U_00AF, 0)

    def U_00B0(self):
        return self.getToken(sdpParser.U_00B0, 0)

    def U_00B1(self):
        return self.getToken(sdpParser.U_00B1, 0)

    def U_00B2(self):
        return self.getToken(sdpParser.U_00B2, 0)

    def U_00B3(self):
        return self.getToken(sdpParser.U_00B3, 0)

    def U_00B4(self):
        return self.getToken(sdpParser.U_00B4, 0)

    def U_00B5(self):
        return self.getToken(sdpParser.U_00B5, 0)

    def U_00B6(self):
        return self.getToken(sdpParser.U_00B6, 0)

    def U_00B7(self):
        return self.getToken(sdpParser.U_00B7, 0)

    def U_00B8(self):
        return self.getToken(sdpParser.U_00B8, 0)

    def U_00B9(self):
        return self.getToken(sdpParser.U_00B9, 0)

    def U_00BA(self):
        return self.getToken(sdpParser.U_00BA, 0)

    def U_00BB(self):
        return self.getToken(sdpParser.U_00BB, 0)

    def U_00BC(self):
        return self.getToken(sdpParser.U_00BC, 0)

    def U_00BD(self):
        return self.getToken(sdpParser.U_00BD, 0)

    def U_00BE(self):
        return self.getToken(sdpParser.U_00BE, 0)

    def U_00BF(self):
        return self.getToken(sdpParser.U_00BF, 0)

    def U_00C0(self):
        return self.getToken(sdpParser.U_00C0, 0)

    def U_00C1(self):
        return self.getToken(sdpParser.U_00C1, 0)

    def U_00C2(self):
        return self.getToken(sdpParser.U_00C2, 0)

    def U_00C3(self):
        return self.getToken(sdpParser.U_00C3, 0)

    def U_00C4(self):
        return self.getToken(sdpParser.U_00C4, 0)

    def U_00C5(self):
        return self.getToken(sdpParser.U_00C5, 0)

    def U_00C6(self):
        return self.getToken(sdpParser.U_00C6, 0)

    def U_00C7(self):
        return self.getToken(sdpParser.U_00C7, 0)

    def U_00C8(self):
        return self.getToken(sdpParser.U_00C8, 0)

    def U_00C9(self):
        return self.getToken(sdpParser.U_00C9, 0)

    def U_00CA(self):
        return self.getToken(sdpParser.U_00CA, 0)

    def U_00CB(self):
        return self.getToken(sdpParser.U_00CB, 0)

    def U_00CC(self):
        return self.getToken(sdpParser.U_00CC, 0)

    def U_00CD(self):
        return self.getToken(sdpParser.U_00CD, 0)

    def U_00CE(self):
        return self.getToken(sdpParser.U_00CE, 0)

    def U_00CF(self):
        return self.getToken(sdpParser.U_00CF, 0)

    def U_00D0(self):
        return self.getToken(sdpParser.U_00D0, 0)

    def U_00D1(self):
        return self.getToken(sdpParser.U_00D1, 0)

    def U_00D2(self):
        return self.getToken(sdpParser.U_00D2, 0)

    def U_00D3(self):
        return self.getToken(sdpParser.U_00D3, 0)

    def U_00D4(self):
        return self.getToken(sdpParser.U_00D4, 0)

    def U_00D5(self):
        return self.getToken(sdpParser.U_00D5, 0)

    def U_00D6(self):
        return self.getToken(sdpParser.U_00D6, 0)

    def U_00D7(self):
        return self.getToken(sdpParser.U_00D7, 0)

    def U_00D8(self):
        return self.getToken(sdpParser.U_00D8, 0)

    def U_00D9(self):
        return self.getToken(sdpParser.U_00D9, 0)

    def U_00DA(self):
        return self.getToken(sdpParser.U_00DA, 0)

    def U_00DB(self):
        return self.getToken(sdpParser.U_00DB, 0)

    def U_00DC(self):
        return self.getToken(sdpParser.U_00DC, 0)

    def U_00DD(self):
        return self.getToken(sdpParser.U_00DD, 0)

    def U_00DE(self):
        return self.getToken(sdpParser.U_00DE, 0)

    def U_00DF(self):
        return self.getToken(sdpParser.U_00DF, 0)

    def U_00E0(self):
        return self.getToken(sdpParser.U_00E0, 0)

    def U_00E1(self):
        return self.getToken(sdpParser.U_00E1, 0)

    def U_00E2(self):
        return self.getToken(sdpParser.U_00E2, 0)

    def U_00E3(self):
        return self.getToken(sdpParser.U_00E3, 0)

    def U_00E4(self):
        return self.getToken(sdpParser.U_00E4, 0)

    def U_00E5(self):
        return self.getToken(sdpParser.U_00E5, 0)

    def U_00E6(self):
        return self.getToken(sdpParser.U_00E6, 0)

    def U_00E7(self):
        return self.getToken(sdpParser.U_00E7, 0)

    def U_00E8(self):
        return self.getToken(sdpParser.U_00E8, 0)

    def U_00E9(self):
        return self.getToken(sdpParser.U_00E9, 0)

    def U_00EA(self):
        return self.getToken(sdpParser.U_00EA, 0)

    def U_00EB(self):
        return self.getToken(sdpParser.U_00EB, 0)

    def U_00EC(self):
        return self.getToken(sdpParser.U_00EC, 0)

    def U_00ED(self):
        return self.getToken(sdpParser.U_00ED, 0)

    def U_00EE(self):
        return self.getToken(sdpParser.U_00EE, 0)

    def U_00EF(self):
        return self.getToken(sdpParser.U_00EF, 0)

    def U_00F0(self):
        return self.getToken(sdpParser.U_00F0, 0)

    def U_00F1(self):
        return self.getToken(sdpParser.U_00F1, 0)

    def U_00F2(self):
        return self.getToken(sdpParser.U_00F2, 0)

    def U_00F3(self):
        return self.getToken(sdpParser.U_00F3, 0)

    def U_00F4(self):
        return self.getToken(sdpParser.U_00F4, 0)

    def U_00F5(self):
        return self.getToken(sdpParser.U_00F5, 0)

    def U_00F6(self):
        return self.getToken(sdpParser.U_00F6, 0)

    def U_00F7(self):
        return self.getToken(sdpParser.U_00F7, 0)

    def U_00F8(self):
        return self.getToken(sdpParser.U_00F8, 0)

    def U_00F9(self):
        return self.getToken(sdpParser.U_00F9, 0)

    def U_00FA(self):
        return self.getToken(sdpParser.U_00FA, 0)

    def U_00FB(self):
        return self.getToken(sdpParser.U_00FB, 0)

    def U_00FC(self):
        return self.getToken(sdpParser.U_00FC, 0)

    def U_00FD(self):
        return self.getToken(sdpParser.U_00FD, 0)

    def U_00FE(self):
        return self.getToken(sdpParser.U_00FE, 0)

    def U_00FF(self):
        return self.getToken(sdpParser.U_00FF, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_octet

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOctet" ):
            listener.enterOctet(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOctet" ):
            listener.exitOctet(self)
def octet(self):
    """Parse the `octet` rule: consume exactly one token from the large
    generated "octet" token set (tested by the bitmask expression below,
    evaluated in 64-token windows at offsets 1, 65, 129 and 193).

    Returns:
        OctetContext: context for the consumed token; on a mismatch the
        error strategy recovers inline, and any RecognitionException is
        recorded on the context.
    """
    localctx = sdpParser.OctetContext(self, self._ctx, self.state)
    self.enterRule(localctx, 264, self.RULE_octet)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1482
        # One token of lookahead decides membership in the octet set.
        _la = self._input.LA(1)
        if not(((((_la - 1)) & ~0x3f) == 0 and ((1 << (_la - 1)) & ((1 << (sdpParser.TAB - 1)) | (1 << (sdpParser.LF - 1)) | (1 << (sdpParser.CR - 1)) | (1 << (sdpParser.SPACE - 1)) | (1 << (sdpParser.EXCLAMATION - 1)) | (1 << (sdpParser.QUOTE - 1)) | (1 << (sdpParser.HASH - 1)) | (1 << (sdpParser.DOLLAR - 1)) | (1 << (sdpParser.PERCENT - 1)) | (1 << (sdpParser.AMPERSAND - 1)) | (1 << (sdpParser.APOSTROPHE - 1)) | (1 << (sdpParser.LEFT_PAREN - 1)) | (1 << (sdpParser.RIGHT_PAREN - 1)) | (1 << (sdpParser.ASTERISK - 1)) | (1 << (sdpParser.PLUS - 1)) | (1 << (sdpParser.COMMA - 1)) | (1 << (sdpParser.DASH - 1)) | (1 << (sdpParser.PERIOD - 1)) | (1 << (sdpParser.SLASH - 1)) | (1 << (sdpParser.ZERO - 1)) | (1 << (sdpParser.ONE - 1)) | (1 << (sdpParser.TWO - 1)) | (1 << (sdpParser.THREE - 1)) | (1 << (sdpParser.FOUR - 1)) | (1 << (sdpParser.FIVE - 1)) | (1 << (sdpParser.SIX - 1)) | (1 << (sdpParser.SEVEN - 1)) | (1 << (sdpParser.EIGHT - 1)) | (1 << (sdpParser.NINE - 1)) | (1 << (sdpParser.COLON - 1)) | (1 << (sdpParser.SEMICOLON - 1)) | (1 << (sdpParser.LESS_THAN - 1)) | (1 << (sdpParser.EQUALS - 1)) | (1 << (sdpParser.GREATER_THAN - 1)) | (1 << (sdpParser.QUESTION - 1)) | (1 << (sdpParser.AT - 1)) | (1 << (sdpParser.CAP_A - 1)) | (1 << (sdpParser.CAP_B - 1)) | (1 << (sdpParser.CAP_C - 1)) | (1 << (sdpParser.CAP_D - 1)) | (1 << (sdpParser.CAP_E - 1)) | (1 << (sdpParser.CAP_F - 1)) | (1 << (sdpParser.CAP_G - 1)) | (1 << (sdpParser.CAP_H - 1)) | (1 << (sdpParser.CAP_I - 1)) | (1 << (sdpParser.CAP_J - 1)) | (1 << (sdpParser.CAP_K - 1)) | (1 << (sdpParser.CAP_L - 1)) | (1 << (sdpParser.CAP_M - 1)) | (1 << (sdpParser.CAP_N - 1)) | (1 << (sdpParser.CAP_O - 1)) | (1 << (sdpParser.CAP_P - 1)) | (1 << (sdpParser.CAP_Q - 1)) | (1 << (sdpParser.CAP_R - 1)) | (1 << (sdpParser.CAP_S - 1)) | (1 << (sdpParser.CAP_T - 1)) | (1 << (sdpParser.CAP_U - 1)) | (1 << (sdpParser.CAP_V - 1)) | (1 << (sdpParser.CAP_W - 1)) | (1 << (sdpParser.CAP_X - 1)) | (1 << (sdpParser.CAP_Y - 1)) | (1 << 
(sdpParser.CAP_Z - 1)) | (1 << (sdpParser.LEFT_BRACE - 1)) | (1 << (sdpParser.BACKSLASH - 1)))) != 0) or ((((_la - 65)) & ~0x3f) == 0 and ((1 << (_la - 65)) & ((1 << (sdpParser.RIGHT_BRACE - 65)) | (1 << (sdpParser.CARAT - 65)) | (1 << (sdpParser.UNDERSCORE - 65)) | (1 << (sdpParser.ACCENT - 65)) | (1 << (sdpParser.A - 65)) | (1 << (sdpParser.B - 65)) | (1 << (sdpParser.C - 65)) | (1 << (sdpParser.D - 65)) | (1 << (sdpParser.E - 65)) | (1 << (sdpParser.F - 65)) | (1 << (sdpParser.G - 65)) | (1 << (sdpParser.H - 65)) | (1 << (sdpParser.I - 65)) | (1 << (sdpParser.J - 65)) | (1 << (sdpParser.K - 65)) | (1 << (sdpParser.L - 65)) | (1 << (sdpParser.M - 65)) | (1 << (sdpParser.N - 65)) | (1 << (sdpParser.O - 65)) | (1 << (sdpParser.P - 65)) | (1 << (sdpParser.Q - 65)) | (1 << (sdpParser.R - 65)) | (1 << (sdpParser.S - 65)) | (1 << (sdpParser.T - 65)) | (1 << (sdpParser.U - 65)) | (1 << (sdpParser.V - 65)) | (1 << (sdpParser.W - 65)) | (1 << (sdpParser.X - 65)) | (1 << (sdpParser.Y - 65)) | (1 << (sdpParser.Z - 65)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 65)) | (1 << (sdpParser.PIPE - 65)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 65)) | (1 << (sdpParser.TILDE - 65)) | (1 << (sdpParser.U_0000 - 65)) | (1 << (sdpParser.U_0001 - 65)) | (1 << (sdpParser.U_0002 - 65)) | (1 << (sdpParser.U_0003 - 65)) | (1 << (sdpParser.U_0004 - 65)) | (1 << (sdpParser.U_0005 - 65)) | (1 << (sdpParser.U_0006 - 65)) | (1 << (sdpParser.U_0007 - 65)) | (1 << (sdpParser.U_0008 - 65)) | (1 << (sdpParser.U_000B - 65)) | (1 << (sdpParser.U_000C - 65)) | (1 << (sdpParser.U_000E - 65)) | (1 << (sdpParser.U_000F - 65)) | (1 << (sdpParser.U_0010 - 65)) | (1 << (sdpParser.U_0011 - 65)) | (1 << (sdpParser.U_0012 - 65)) | (1 << (sdpParser.U_0013 - 65)) | (1 << (sdpParser.U_0014 - 65)) | (1 << (sdpParser.U_0015 - 65)) | (1 << (sdpParser.U_0016 - 65)) | (1 << (sdpParser.U_0017 - 65)) | (1 << (sdpParser.U_0018 - 65)) | (1 << (sdpParser.U_0019 - 65)) | (1 << (sdpParser.U_001A - 65)) | (1 << (sdpParser.U_001B - 
65)) | (1 << (sdpParser.U_001C - 65)) | (1 << (sdpParser.U_001D - 65)) | (1 << (sdpParser.U_001E - 65)) | (1 << (sdpParser.U_001F - 65)) | (1 << (sdpParser.U_007F - 65)))) != 0) or ((((_la - 129)) & ~0x3f) == 0 and ((1 << (_la - 129)) & ((1 << (sdpParser.U_0080 - 129)) | (1 << (sdpParser.U_0081 - 129)) | (1 << (sdpParser.U_0082 - 129)) | (1 << (sdpParser.U_0083 - 129)) | (1 << (sdpParser.U_0084 - 129)) | (1 << (sdpParser.U_0085 - 129)) | (1 << (sdpParser.U_0086 - 129)) | (1 << (sdpParser.U_0087 - 129)) | (1 << (sdpParser.U_0088 - 129)) | (1 << (sdpParser.U_0089 - 129)) | (1 << (sdpParser.U_008A - 129)) | (1 << (sdpParser.U_008B - 129)) | (1 << (sdpParser.U_008C - 129)) | (1 << (sdpParser.U_008D - 129)) | (1 << (sdpParser.U_008E - 129)) | (1 << (sdpParser.U_008F - 129)) | (1 << (sdpParser.U_0090 - 129)) | (1 << (sdpParser.U_0091 - 129)) | (1 << (sdpParser.U_0092 - 129)) | (1 << (sdpParser.U_0093 - 129)) | (1 << (sdpParser.U_0094 - 129)) | (1 << (sdpParser.U_0095 - 129)) | (1 << (sdpParser.U_0096 - 129)) | (1 << (sdpParser.U_0097 - 129)) | (1 << (sdpParser.U_0098 - 129)) | (1 << (sdpParser.U_0099 - 129)) | (1 << (sdpParser.U_009A - 129)) | (1 << (sdpParser.U_009B - 129)) | (1 << (sdpParser.U_009C - 129)) | (1 << (sdpParser.U_009D - 129)) | (1 << (sdpParser.U_009E - 129)) | (1 << (sdpParser.U_009F - 129)) | (1 << (sdpParser.U_00A0 - 129)) | (1 << (sdpParser.U_00A1 - 129)) | (1 << (sdpParser.U_00A2 - 129)) | (1 << (sdpParser.U_00A3 - 129)) | (1 << (sdpParser.U_00A4 - 129)) | (1 << (sdpParser.U_00A5 - 129)) | (1 << (sdpParser.U_00A6 - 129)) | (1 << (sdpParser.U_00A7 - 129)) | (1 << (sdpParser.U_00A8 - 129)) | (1 << (sdpParser.U_00A9 - 129)) | (1 << (sdpParser.U_00AA - 129)) | (1 << (sdpParser.U_00AB - 129)) | (1 << (sdpParser.U_00AC - 129)) | (1 << (sdpParser.U_00AD - 129)) | (1 << (sdpParser.U_00AE - 129)) | (1 << (sdpParser.U_00AF - 129)) | (1 << (sdpParser.U_00B0 - 129)) | (1 << (sdpParser.U_00B1 - 129)) | (1 << (sdpParser.U_00B2 - 129)) | (1 << (sdpParser.U_00B3 - 
129)) | (1 << (sdpParser.U_00B4 - 129)) | (1 << (sdpParser.U_00B5 - 129)) | (1 << (sdpParser.U_00B6 - 129)) | (1 << (sdpParser.U_00B7 - 129)) | (1 << (sdpParser.U_00B8 - 129)) | (1 << (sdpParser.U_00B9 - 129)) | (1 << (sdpParser.U_00BA - 129)) | (1 << (sdpParser.U_00BB - 129)) | (1 << (sdpParser.U_00BC - 129)) | (1 << (sdpParser.U_00BD - 129)) | (1 << (sdpParser.U_00BE - 129)) | (1 << (sdpParser.U_00BF - 129)))) != 0) or ((((_la - 193)) & ~0x3f) == 0 and ((1 << (_la - 193)) & ((1 << (sdpParser.U_00C0 - 193)) | (1 << (sdpParser.U_00C1 - 193)) | (1 << (sdpParser.U_00C2 - 193)) | (1 << (sdpParser.U_00C3 - 193)) | (1 << (sdpParser.U_00C4 - 193)) | (1 << (sdpParser.U_00C5 - 193)) | (1 << (sdpParser.U_00C6 - 193)) | (1 << (sdpParser.U_00C7 - 193)) | (1 << (sdpParser.U_00C8 - 193)) | (1 << (sdpParser.U_00C9 - 193)) | (1 << (sdpParser.U_00CA - 193)) | (1 << (sdpParser.U_00CB - 193)) | (1 << (sdpParser.U_00CC - 193)) | (1 << (sdpParser.U_00CD - 193)) | (1 << (sdpParser.U_00CE - 193)) | (1 << (sdpParser.U_00CF - 193)) | (1 << (sdpParser.U_00D0 - 193)) | (1 << (sdpParser.U_00D1 - 193)) | (1 << (sdpParser.U_00D2 - 193)) | (1 << (sdpParser.U_00D3 - 193)) | (1 << (sdpParser.U_00D4 - 193)) | (1 << (sdpParser.U_00D5 - 193)) | (1 << (sdpParser.U_00D6 - 193)) | (1 << (sdpParser.U_00D7 - 193)) | (1 << (sdpParser.U_00D8 - 193)) | (1 << (sdpParser.U_00D9 - 193)) | (1 << (sdpParser.U_00DA - 193)) | (1 << (sdpParser.U_00DB - 193)) | (1 << (sdpParser.U_00DC - 193)) | (1 << (sdpParser.U_00DD - 193)) | (1 << (sdpParser.U_00DE - 193)) | (1 << (sdpParser.U_00DF - 193)) | (1 << (sdpParser.U_00E0 - 193)) | (1 << (sdpParser.U_00E1 - 193)) | (1 << (sdpParser.U_00E2 - 193)) | (1 << (sdpParser.U_00E3 - 193)) | (1 << (sdpParser.U_00E4 - 193)) | (1 << (sdpParser.U_00E5 - 193)) | (1 << (sdpParser.U_00E6 - 193)) | (1 << (sdpParser.U_00E7 - 193)) | (1 << (sdpParser.U_00E8 - 193)) | (1 << (sdpParser.U_00E9 - 193)) | (1 << (sdpParser.U_00EA - 193)) | (1 << (sdpParser.U_00EB - 193)) | (1 << 
(sdpParser.U_00EC - 193)) | (1 << (sdpParser.U_00ED - 193)) | (1 << (sdpParser.U_00EE - 193)) | (1 << (sdpParser.U_00EF - 193)) | (1 << (sdpParser.U_00F0 - 193)) | (1 << (sdpParser.U_00F1 - 193)) | (1 << (sdpParser.U_00F2 - 193)) | (1 << (sdpParser.U_00F3 - 193)) | (1 << (sdpParser.U_00F4 - 193)) | (1 << (sdpParser.U_00F5 - 193)) | (1 << (sdpParser.U_00F6 - 193)) | (1 << (sdpParser.U_00F7 - 193)) | (1 << (sdpParser.U_00F8 - 193)) | (1 << (sdpParser.U_00F9 - 193)) | (1 << (sdpParser.U_00FA - 193)) | (1 << (sdpParser.U_00FB - 193)) | (1 << (sdpParser.U_00FC - 193)) | (1 << (sdpParser.U_00FD - 193)) | (1 << (sdpParser.U_00FE - 193)) | (1 << (sdpParser.U_00FF - 193)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SpContext(ParserRuleContext):
    """Parse-tree context for the `sp` rule (one SPACE terminal)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def SPACE(self):
        # First (index 0) SPACE terminal under this context.
        return self.getToken(sdpParser.SPACE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_sp

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterSp", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitSp", None)
        if handler is not None:
            handler(self)
def sp(self):
    """Parse the `sp` rule: match a single SPACE token.

    Returns:
        SpContext: the context for this rule invocation; any
        RecognitionException raised during the match is recorded on it.
    """
    localctx = sdpParser.SpContext(self, self._ctx, self.state)
    self.enterRule(localctx, 266, self.RULE_sp)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1484
        self.match(sdpParser.SPACE)
    except RecognitionException as re:
        # Record the failure and let the error strategy report/resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VcharContext(ParserRuleContext):
    """Parse-tree context for the `vchar` rule.

    One generated accessor per visible-character terminal the rule can
    match; each returns the first (index 0) terminal node of that token
    type under this context.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # --- punctuation terminals ---
    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)
    def QUOTE(self):
        return self.getToken(sdpParser.QUOTE, 0)
    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)
    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)
    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)
    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)
    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)
    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)
    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)
    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)
    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)
    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)
    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)
    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)
    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)
    # --- digit terminals ---
    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)
    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)
    def TWO(self):
        return self.getToken(sdpParser.TWO, 0)
    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)
    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)
    def FIVE(self):
        return self.getToken(sdpParser.FIVE, 0)
    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)
    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)
    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)
    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)
    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)
    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)
    def LESS_THAN(self):
        return self.getToken(sdpParser.LESS_THAN, 0)
    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)
    def GREATER_THAN(self):
        return self.getToken(sdpParser.GREATER_THAN, 0)
    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)
    def AT(self):
        return self.getToken(sdpParser.AT, 0)
    # --- uppercase letter terminals ---
    def CAP_A(self):
        return self.getToken(sdpParser.CAP_A, 0)
    def CAP_B(self):
        return self.getToken(sdpParser.CAP_B, 0)
    def CAP_C(self):
        return self.getToken(sdpParser.CAP_C, 0)
    def CAP_D(self):
        return self.getToken(sdpParser.CAP_D, 0)
    def CAP_E(self):
        return self.getToken(sdpParser.CAP_E, 0)
    def CAP_F(self):
        return self.getToken(sdpParser.CAP_F, 0)
    def CAP_G(self):
        return self.getToken(sdpParser.CAP_G, 0)
    def CAP_H(self):
        return self.getToken(sdpParser.CAP_H, 0)
    def CAP_I(self):
        return self.getToken(sdpParser.CAP_I, 0)
    def CAP_J(self):
        return self.getToken(sdpParser.CAP_J, 0)
    def CAP_K(self):
        return self.getToken(sdpParser.CAP_K, 0)
    def CAP_L(self):
        return self.getToken(sdpParser.CAP_L, 0)
    def CAP_M(self):
        return self.getToken(sdpParser.CAP_M, 0)
    def CAP_N(self):
        return self.getToken(sdpParser.CAP_N, 0)
    def CAP_O(self):
        return self.getToken(sdpParser.CAP_O, 0)
    def CAP_P(self):
        return self.getToken(sdpParser.CAP_P, 0)
    def CAP_Q(self):
        return self.getToken(sdpParser.CAP_Q, 0)
    def CAP_R(self):
        return self.getToken(sdpParser.CAP_R, 0)
    def CAP_S(self):
        return self.getToken(sdpParser.CAP_S, 0)
    def CAP_T(self):
        return self.getToken(sdpParser.CAP_T, 0)
    def CAP_U(self):
        return self.getToken(sdpParser.CAP_U, 0)
    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)
    def CAP_W(self):
        return self.getToken(sdpParser.CAP_W, 0)
    def CAP_X(self):
        return self.getToken(sdpParser.CAP_X, 0)
    def CAP_Y(self):
        return self.getToken(sdpParser.CAP_Y, 0)
    def CAP_Z(self):
        return self.getToken(sdpParser.CAP_Z, 0)
    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)
    def BACKSLASH(self):
        return self.getToken(sdpParser.BACKSLASH, 0)
    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)
    def CARAT(self):
        return self.getToken(sdpParser.CARAT, 0)
    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)
    def ACCENT(self):
        return self.getToken(sdpParser.ACCENT, 0)
    # --- lowercase letter terminals ---
    def A(self):
        return self.getToken(sdpParser.A, 0)
    def B(self):
        return self.getToken(sdpParser.B, 0)
    def C(self):
        return self.getToken(sdpParser.C, 0)
    def D(self):
        return self.getToken(sdpParser.D, 0)
    def E(self):
        return self.getToken(sdpParser.E, 0)
    def F(self):
        return self.getToken(sdpParser.F, 0)
    def G(self):
        return self.getToken(sdpParser.G, 0)
    def H(self):
        return self.getToken(sdpParser.H, 0)
    def I(self):
        return self.getToken(sdpParser.I, 0)
    def J(self):
        return self.getToken(sdpParser.J, 0)
    def K(self):
        return self.getToken(sdpParser.K, 0)
    def L(self):
        return self.getToken(sdpParser.L, 0)
    def M(self):
        return self.getToken(sdpParser.M, 0)
    def N(self):
        return self.getToken(sdpParser.N, 0)
    def O(self):
        return self.getToken(sdpParser.O, 0)
    def P(self):
        return self.getToken(sdpParser.P, 0)
    def Q(self):
        return self.getToken(sdpParser.Q, 0)
    def R(self):
        return self.getToken(sdpParser.R, 0)
    def S(self):
        return self.getToken(sdpParser.S, 0)
    def T(self):
        return self.getToken(sdpParser.T, 0)
    def U(self):
        return self.getToken(sdpParser.U, 0)
    def V(self):
        return self.getToken(sdpParser.V, 0)
    def W(self):
        return self.getToken(sdpParser.W, 0)
    def X(self):
        return self.getToken(sdpParser.X, 0)
    def Y(self):
        return self.getToken(sdpParser.Y, 0)
    def Z(self):
        return self.getToken(sdpParser.Z, 0)
    def LEFT_CURLY_BRACE(self):
        return self.getToken(sdpParser.LEFT_CURLY_BRACE, 0)
    def PIPE(self):
        return self.getToken(sdpParser.PIPE, 0)
    def RIGHT_CURLY_BRACE(self):
        return self.getToken(sdpParser.RIGHT_CURLY_BRACE, 0)
    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)
    def getRuleIndex(self):
        # Index of the `vchar` rule within the generated rule table.
        return sdpParser.RULE_vchar
    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener only if it implements the hook.
        if hasattr( listener, "enterVchar" ):
            listener.enterVchar(self)
    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitVchar" ):
            listener.exitVchar(self)
def vchar(self):
    """Parse the `vchar` rule: consume one visible-character token
    (punctuation, digit, upper/lowercase letter, brace, pipe or tilde —
    the exact set is the bitmask test below, in 64-token windows at
    offsets 0 and 64).

    Returns:
        VcharContext: context for the consumed token; on a mismatch the
        error strategy recovers inline.
    """
    localctx = sdpParser.VcharContext(self, self._ctx, self.state)
    self.enterRule(localctx, 268, self.RULE_vchar)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1486
        # One token of lookahead decides set membership.
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.QUOTE) | (1 << sdpParser.HASH) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.LESS_THAN) | (1 << sdpParser.EQUALS) | (1 << sdpParser.GREATER_THAN) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z) | (1 << sdpParser.LEFT_BRACE))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (sdpParser.BACKSLASH - 64)) | (1 << (sdpParser.RIGHT_BRACE - 64)) | (1 << (sdpParser.CARAT - 64)) | (1 << (sdpParser.UNDERSCORE - 64)) | (1 << (sdpParser.ACCENT - 64)) | (1 << (sdpParser.A - 64)) | (1 << (sdpParser.B - 64)) | (1 << (sdpParser.C - 64)) | (1 << (sdpParser.D - 64)) | (1 << (sdpParser.E - 64)) | (1 << (sdpParser.F - 64)) | (1 << (sdpParser.G - 64)) | 
(1 << (sdpParser.H - 64)) | (1 << (sdpParser.I - 64)) | (1 << (sdpParser.J - 64)) | (1 << (sdpParser.K - 64)) | (1 << (sdpParser.L - 64)) | (1 << (sdpParser.M - 64)) | (1 << (sdpParser.N - 64)) | (1 << (sdpParser.O - 64)) | (1 << (sdpParser.P - 64)) | (1 << (sdpParser.Q - 64)) | (1 << (sdpParser.R - 64)) | (1 << (sdpParser.S - 64)) | (1 << (sdpParser.T - 64)) | (1 << (sdpParser.U - 64)) | (1 << (sdpParser.V - 64)) | (1 << (sdpParser.W - 64)) | (1 << (sdpParser.X - 64)) | (1 << (sdpParser.Y - 64)) | (1 << (sdpParser.Z - 64)) | (1 << (sdpParser.LEFT_CURLY_BRACE - 64)) | (1 << (sdpParser.PIPE - 64)) | (1 << (sdpParser.RIGHT_CURLY_BRACE - 64)) | (1 << (sdpParser.TILDE - 64)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WspContext(ParserRuleContext):
    """Parse-tree context for the `wsp` rule: one whitespace element,
    either an `sp` or an `htab` sub-rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def sp(self):
        return self.getTypedRuleContext(sdpParser.SpContext,0)

    def htab(self):
        return self.getTypedRuleContext(sdpParser.HtabContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_wsp

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterWsp", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitWsp", None)
        if handler is not None:
            handler(self)
def wsp(self):
    """Parse the `wsp` rule: `sp | htab`, chosen on one token of
    lookahead (SPACE selects alternative 1, TAB alternative 2).

    Returns:
        WspContext: the context for this rule invocation.
    """
    localctx = sdpParser.WspContext(self, self._ctx, self.state)
    self.enterRule(localctx, 270, self.RULE_wsp)
    try:
        self.state = 1490
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.SPACE]:
            self.enterOuterAlt(localctx, 1)
            self.state = 1488
            self.sp()
            pass
        elif token in [sdpParser.TAB]:
            self.enterOuterAlt(localctx, 2)
            self.state = 1489
            self.htab()
            pass
        else:
            # Lookahead matches neither alternative.
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class XxuriContext(ParserRuleContext):
    """Parse-tree context for the `xxuri` rule:
    scheme ':' hier_part ('?' query)? ('#' fragment_1)?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def scheme(self):
        return self.getTypedRuleContext(sdpParser.SchemeContext,0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def hier_part(self):
        return self.getTypedRuleContext(sdpParser.Hier_partContext,0)

    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)

    def query(self):
        return self.getTypedRuleContext(sdpParser.QueryContext,0)

    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)

    def fragment_1(self):
        return self.getTypedRuleContext(sdpParser.Fragment_1Context,0)

    def getRuleIndex(self):
        return sdpParser.RULE_xxuri

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterXxuri", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitXxuri", None)
        if handler is not None:
            handler(self)
def xxuri(self):
    """Parse the `xxuri` rule:
    scheme ':' hier_part ('?' query)? ('#' fragment_1)?.

    Returns:
        XxuriContext: the context for this rule invocation.
    """
    localctx = sdpParser.XxuriContext(self, self._ctx, self.state)
    self.enterRule(localctx, 272, self.RULE_xxuri)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1492
        self.scheme()
        self.state = 1493
        self.match(sdpParser.COLON)
        self.state = 1494
        self.hier_part()
        self.state = 1497
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional query, taken only when the next token is '?'.
        if _la==sdpParser.QUESTION:
            self.state = 1495
            self.match(sdpParser.QUESTION)
            self.state = 1496
            self.query()

        self.state = 1501
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional fragment, taken only when the next token is '#'.
        if _la==sdpParser.HASH:
            self.state = 1499
            self.match(sdpParser.HASH)
            self.state = 1500
            self.fragment_1()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Hier_partContext(ParserRuleContext):
    """Parse-tree context for the `hier_part` rule (RFC 3986 style
    hier-part: '//' authority path-abempty, or one of the path forms)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def authority(self):
        return self.getTypedRuleContext(sdpParser.AuthorityContext,0)

    def path_abempty(self):
        return self.getTypedRuleContext(sdpParser.Path_abemptyContext,0)

    def SLASH(self, i:int=None):
        # No index: every SLASH terminal; with an index: just that one.
        if i is None:
            return self.getTokens(sdpParser.SLASH)
        return self.getToken(sdpParser.SLASH, i)

    def path_absolute(self):
        return self.getTypedRuleContext(sdpParser.Path_absoluteContext,0)

    def path_rootless(self):
        return self.getTypedRuleContext(sdpParser.Path_rootlessContext,0)

    def path_empty(self):
        return self.getTypedRuleContext(sdpParser.Path_emptyContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_hier_part

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterHier_part", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitHier_part", None)
        if handler is not None:
            handler(self)
def hier_part(self):
    """Parse the `hier_part` rule:
    '//' authority path_abempty | path_absolute | path_rootless | path_empty,
    with the alternative chosen by adaptive prediction (decision 178).

    Returns:
        Hier_partContext: the context for this rule invocation.
    """
    localctx = sdpParser.Hier_partContext(self, self._ctx, self.state)
    self.enterRule(localctx, 274, self.RULE_hier_part)
    try:
        self.state = 1512
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,178,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1503
            self.match(sdpParser.SLASH)
            self.state = 1504
            self.match(sdpParser.SLASH)
            self.state = 1506
            self.authority()
            self.state = 1507
            self.path_abempty()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1509
            self.path_absolute()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1510
            self.path_rootless()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1511
            self.path_empty()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Uri_referenceContext(ParserRuleContext):
    """Parse-tree context for the `uri_reference` rule:
    either a full `xxuri` or a `relative_ref`."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def xxuri(self):
        return self.getTypedRuleContext(sdpParser.XxuriContext,0)

    def relative_ref(self):
        return self.getTypedRuleContext(sdpParser.Relative_refContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_uri_reference

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterUri_reference", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitUri_reference", None)
        if handler is not None:
            handler(self)
def uri_reference(self):
    """Parse the `uri_reference` rule: xxuri | relative_ref, chosen by
    adaptive prediction (decision 179).

    Returns:
        Uri_referenceContext: the context for this rule invocation.
    """
    localctx = sdpParser.Uri_referenceContext(self, self._ctx, self.state)
    self.enterRule(localctx, 276, self.RULE_uri_reference)
    try:
        self.state = 1516
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,179,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1514
            self.xxuri()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1515
            self.relative_ref()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Absolute_uriContext(ParserRuleContext):
    """Parse-tree context for the `absolute_uri` rule:
    scheme ':' hier_part ('?' query)? — no fragment."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def scheme(self):
        return self.getTypedRuleContext(sdpParser.SchemeContext,0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def hier_part(self):
        return self.getTypedRuleContext(sdpParser.Hier_partContext,0)

    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)

    def query(self):
        return self.getTypedRuleContext(sdpParser.QueryContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_absolute_uri

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterAbsolute_uri", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitAbsolute_uri", None)
        if handler is not None:
            handler(self)
def absolute_uri(self):
    """Parse the `absolute_uri` rule: scheme ':' hier_part ('?' query)?.

    Returns:
        Absolute_uriContext: the context for this rule invocation.
    """
    localctx = sdpParser.Absolute_uriContext(self, self._ctx, self.state)
    self.enterRule(localctx, 278, self.RULE_absolute_uri)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1518
        self.scheme()
        self.state = 1519
        self.match(sdpParser.COLON)
        self.state = 1520
        self.hier_part()
        self.state = 1523
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional query, taken only when the next token is '?'.
        if _la==sdpParser.QUESTION:
            self.state = 1521
            self.match(sdpParser.QUESTION)
            self.state = 1522
            self.query()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Relative_refContext(ParserRuleContext):
    """Parse-tree context for the `relative_ref` rule:
    relative_part ('?' query)? ('#' fragment_1)?."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def relative_part(self):
        return self.getTypedRuleContext(sdpParser.Relative_partContext,0)

    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)

    def query(self):
        return self.getTypedRuleContext(sdpParser.QueryContext,0)

    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)

    def fragment_1(self):
        return self.getTypedRuleContext(sdpParser.Fragment_1Context,0)

    def getRuleIndex(self):
        return sdpParser.RULE_relative_ref

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterRelative_ref", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitRelative_ref", None)
        if handler is not None:
            handler(self)
def relative_ref(self):
    """Parse the `relative_ref` rule:
    relative_part ('?' query)? ('#' fragment_1)?.

    Returns:
        Relative_refContext: the context for this rule invocation.
    """
    localctx = sdpParser.Relative_refContext(self, self._ctx, self.state)
    self.enterRule(localctx, 280, self.RULE_relative_ref)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1525
        self.relative_part()
        self.state = 1528
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional query, taken only when the next token is '?'.
        if _la==sdpParser.QUESTION:
            self.state = 1526
            self.match(sdpParser.QUESTION)
            self.state = 1527
            self.query()

        self.state = 1532
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional fragment, taken only when the next token is '#'.
        if _la==sdpParser.HASH:
            self.state = 1530
            self.match(sdpParser.HASH)
            self.state = 1531
            self.fragment_1()

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Relative_partContext(ParserRuleContext):
    """Parse-tree context for the `relative_part` rule (RFC 3986 style
    relative-part: '//' authority path-abempty, or one of the path forms)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def authority(self):
        return self.getTypedRuleContext(sdpParser.AuthorityContext,0)

    def path_abempty(self):
        return self.getTypedRuleContext(sdpParser.Path_abemptyContext,0)

    def SLASH(self, i:int=None):
        # No index: every SLASH terminal; with an index: just that one.
        if i is None:
            return self.getTokens(sdpParser.SLASH)
        return self.getToken(sdpParser.SLASH, i)

    def path_absolute(self):
        return self.getTypedRuleContext(sdpParser.Path_absoluteContext,0)

    def path_noscheme(self):
        return self.getTypedRuleContext(sdpParser.Path_noschemeContext,0)

    def path_empty(self):
        return self.getTypedRuleContext(sdpParser.Path_emptyContext,0)

    def getRuleIndex(self):
        return sdpParser.RULE_relative_part

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterRelative_part", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitRelative_part", None)
        if handler is not None:
            handler(self)
def relative_part(self):
    """Parse the `relative_part` rule:
    '//' authority path_abempty | path_absolute | path_noscheme | path_empty,
    with the alternative chosen by adaptive prediction (decision 183).

    Returns:
        Relative_partContext: the context for this rule invocation.
    """
    localctx = sdpParser.Relative_partContext(self, self._ctx, self.state)
    self.enterRule(localctx, 282, self.RULE_relative_part)
    try:
        self.state = 1543
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,183,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1534
            self.match(sdpParser.SLASH)
            self.state = 1535
            self.match(sdpParser.SLASH)
            self.state = 1537
            self.authority()
            self.state = 1538
            self.path_abempty()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1540
            self.path_absolute()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1541
            self.path_noscheme()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 1542
            self.path_empty()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class SchemeContext(ParserRuleContext):
    """Parse-tree context for the `scheme` rule.

    The rule can contain repeated `alpha`/`digit` sub-rules and repeated
    PLUS/DASH/PERIOD terminals, so every accessor takes an optional
    index: omit it for the full list, pass it for a single occurrence.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def alpha(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.AlphaContext)
        return self.getTypedRuleContext(sdpParser.AlphaContext,i)

    def digit(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext,i)

    def PLUS(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.PLUS)
        return self.getToken(sdpParser.PLUS, i)

    def DASH(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.DASH)
        return self.getToken(sdpParser.DASH, i)

    def PERIOD(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.PERIOD)
        return self.getToken(sdpParser.PERIOD, i)

    def getRuleIndex(self):
        return sdpParser.RULE_scheme

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterScheme", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitScheme", None)
        if handler is not None:
            handler(self)
def scheme(self):
    """Parse the `scheme` rule (rule index 284): one `alpha`, then zero or
    more of `alpha` / `digit` / '+' / '-' / '.'.

    NOTE(review): auto-generated ANTLR recognizer — the `self.state = N`
    assignments are serialized-ATN state numbers; do not hand-edit the logic.
    """
    localctx = sdpParser.SchemeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 284, self.RULE_scheme)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1545
        # A scheme must begin with a single letter.
        self.alpha()
        self.state = 1553
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Repeat while the lookahead can continue the scheme; membership is
        # tested with two 64-bit token bitmasks (token types 0-63 and 64+).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.PLUS) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 69)) & ~0x3f) == 0 and ((1 << (_la - 69)) & ((1 << (sdpParser.A - 69)) | (1 << (sdpParser.B - 69)) | (1 << (sdpParser.C - 69)) | (1 << (sdpParser.D - 69)) | (1 << (sdpParser.E - 69)) | (1 << (sdpParser.F - 69)) | (1 << (sdpParser.G - 69)) | (1 << (sdpParser.H - 69)) | (1 << (sdpParser.I - 69)) | (1 << (sdpParser.J - 69)) | (1 << (sdpParser.K - 69)) | (1 << (sdpParser.L - 69)) | (1 << (sdpParser.M - 69)) | (1 << (sdpParser.N - 69)) | (1 << (sdpParser.O - 69)) | (1 << (sdpParser.P - 69)) | (1 << (sdpParser.Q - 69)) | (1 << (sdpParser.R - 69)) | (1 << (sdpParser.S - 69)) | (1 << (sdpParser.T - 69)) | (1 << (sdpParser.U - 69)) | (1 << (sdpParser.V - 69)) | (1 << (sdpParser.W - 69)) | (1 << (sdpParser.X - 69)) | (1 << (sdpParser.Y - 69)) | (1 << (sdpParser.Z - 69)))) != 0):
            self.state = 1551
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # Dispatch on the lookahead token to one of the five alternatives.
            if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
                self.state = 1546
                self.alpha()
                pass
            elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
                self.state = 1547
                self.digit()
                pass
            elif token in [sdpParser.PLUS]:
                self.state = 1548
                self.match(sdpParser.PLUS)
                pass
            elif token in [sdpParser.DASH]:
                self.state = 1549
                self.match(sdpParser.DASH)
                pass
            elif token in [sdpParser.PERIOD]:
                self.state = 1550
                self.match(sdpParser.PERIOD)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 1555
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AuthorityContext(ParserRuleContext):
    """Parse-tree node for the `authority` rule (optional userinfo '@', host, optional ':' xport)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def host(self):
        return self.getTypedRuleContext(sdpParser.HostContext, 0)

    def userinfo(self):
        return self.getTypedRuleContext(sdpParser.UserinfoContext, 0)

    def AT(self):
        return self.getToken(sdpParser.AT, 0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def xport(self):
        return self.getTypedRuleContext(sdpParser.XportContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_authority

    def enterRule(self, listener:ParseTreeListener):
        # Listener hooks are optional; dispatch only when implemented.
        if hasattr(listener, "enterAuthority"):
            listener.enterAuthority(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitAuthority"):
            listener.exitAuthority(self)
def authority(self):
    """Parse the `authority` rule (rule index 286): optional `userinfo '@'`,
    then `host`, then optional `':' xport`.

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.AuthorityContext(self, self._ctx, self.state)
    self.enterRule(localctx, 286, self.RULE_authority)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1559
        self._errHandler.sync(self)
        # Adaptive prediction (decision 186) decides whether a userinfo
        # section precedes the host.
        la_ = self._interp.adaptivePredict(self._input,186,self._ctx)
        if la_ == 1:
            self.state = 1556
            self.userinfo()
            self.state = 1557
            self.match(sdpParser.AT)

        self.state = 1561
        self.host()
        self.state = 1564
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional port section: ':' followed by xport.
        if _la==sdpParser.COLON:
            self.state = 1562
            self.match(sdpParser.COLON)
            self.state = 1563
            self.xport()

    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UserinfoContext(ParserRuleContext):
    """Parse-tree node for the `userinfo` rule (unreserved / pct-encoded / sub-delims / ':')*."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def unreserved(self, i:int=None):
        # No index: all unreserved children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.UnreservedContext)
        return self.getTypedRuleContext(sdpParser.UnreservedContext, i)

    def pct_encoded(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Pct_encodedContext)
        return self.getTypedRuleContext(sdpParser.Pct_encodedContext, i)

    def sub_delims(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Sub_delimsContext)
        return self.getTypedRuleContext(sdpParser.Sub_delimsContext, i)

    def COLON(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COLON)
        return self.getToken(sdpParser.COLON, i)

    def getRuleIndex(self):
        return sdpParser.RULE_userinfo

    def enterRule(self, listener:ParseTreeListener):
        if hasattr(listener, "enterUserinfo"):
            listener.enterUserinfo(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitUserinfo"):
            listener.exitUserinfo(self)
def userinfo(self):
    """Parse the `userinfo` rule (rule index 288): zero or more of
    `unreserved` / `pct_encoded` / `sub_delims` / ':'.

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.UserinfoContext(self, self._ctx, self.state)
    self.enterRule(localctx, 288, self.RULE_userinfo)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1572
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Repeat while the lookahead is in the userinfo character set; the
        # membership test uses two 64-bit token bitmasks (types 0-63 / 64+).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
            self.state = 1570
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # Dispatch on the lookahead token to one of the four alternatives.
            if token in [sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
                self.state = 1566
                self.unreserved()
                pass
            elif token in [sdpParser.PERCENT]:
                self.state = 1567
                self.pct_encoded()
                pass
            elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
                self.state = 1568
                self.sub_delims()
                pass
            elif token in [sdpParser.COLON]:
                self.state = 1569
                self.match(sdpParser.COLON)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 1574
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class HostContext(ParserRuleContext):
    """Parse-tree node for the `host` rule (ip_literal / ipv4address / reg_name)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ip_literal(self):
        return self.getTypedRuleContext(sdpParser.Ip_literalContext, 0)

    def ipv4address(self):
        return self.getTypedRuleContext(sdpParser.Ipv4addressContext, 0)

    def reg_name(self):
        return self.getTypedRuleContext(sdpParser.Reg_nameContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_host

    def enterRule(self, listener:ParseTreeListener):
        # Listener hooks are optional; dispatch only when implemented.
        if hasattr(listener, "enterHost"):
            listener.enterHost(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitHost"):
            listener.exitHost(self)
def host(self):
    """Parse the `host` rule (rule index 290): `ip_literal` / `ipv4address`
    / `reg_name`, chosen by adaptive prediction (decision 190).

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.HostContext(self, self._ctx, self.state)
    self.enterRule(localctx, 290, self.RULE_host)
    try:
        self.state = 1578
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,190,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 1575
            self.ip_literal()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 1576
            self.ipv4address()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 1577
            self.reg_name()
            pass
    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class XportContext(ParserRuleContext):
    """Parse-tree node for the `xport` rule (a possibly-empty run of digits)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # No index: all digit children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.DigitContext)
        return self.getTypedRuleContext(sdpParser.DigitContext, i)

    def getRuleIndex(self):
        return sdpParser.RULE_xport

    def enterRule(self, listener:ParseTreeListener):
        if hasattr(listener, "enterXport"):
            listener.enterXport(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitXport"):
            listener.exitXport(self)
def xport(self):
    """Parse the `xport` rule (rule index 292): zero or more `digit`s.

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.XportContext(self, self._ctx, self.state)
    self.enterRule(localctx, 292, self.RULE_xport)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1583
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Consume digits while the lookahead is ZERO..NINE (bitmask test).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0):
            self.state = 1580
            self.digit()
            self.state = 1585
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ip_literalContext(ParserRuleContext):
    """Parse-tree node for the `ip_literal` rule: a bracketed ipv6address or ipvfuture.

    NOTE(review): this grammar names the bracket tokens LEFT_BRACE/RIGHT_BRACE.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)

    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)

    def ipv6address(self):
        return self.getTypedRuleContext(sdpParser.Ipv6addressContext, 0)

    def ipvfuture(self):
        return self.getTypedRuleContext(sdpParser.IpvfutureContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ip_literal

    def enterRule(self, listener:ParseTreeListener):
        if hasattr(listener, "enterIp_literal"):
            listener.enterIp_literal(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitIp_literal"):
            listener.exitIp_literal(self)
def ip_literal(self):
    """Parse the `ip_literal` rule (rule index 294): an `ipv6address` or
    `ipvfuture` enclosed in the LEFT_BRACE/RIGHT_BRACE tokens.

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.Ip_literalContext(self, self._ctx, self.state)
    self.enterRule(localctx, 294, self.RULE_ip_literal)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1586
        self.match(sdpParser.LEFT_BRACE)
        self.state = 1589
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Hex digits / ':' start an ipv6address; 'v'/'V' starts an ipvfuture.
        if token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F]:
            self.state = 1587
            self.ipv6address()
            pass
        elif token in [sdpParser.CAP_V, sdpParser.V]:
            self.state = 1588
            self.ipvfuture()
            pass
        else:
            raise NoViableAltException(self)
        self.state = 1591
        self.match(sdpParser.RIGHT_BRACE)
    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IpvfutureContext(ParserRuleContext):
    """Parse-tree node for the `ipvfuture` rule ('v' hexdigits '.' suffix chars)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)

    def CAP_V(self):
        return self.getToken(sdpParser.CAP_V, 0)

    def V(self):
        return self.getToken(sdpParser.V, 0)

    def hexdig(self, i:int=None):
        # No index: all hexdig children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.HexdigContext)
        return self.getTypedRuleContext(sdpParser.HexdigContext, i)

    def unreserved(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.UnreservedContext)
        return self.getTypedRuleContext(sdpParser.UnreservedContext, i)

    def sub_delims(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(sdpParser.Sub_delimsContext)
        return self.getTypedRuleContext(sdpParser.Sub_delimsContext, i)

    def COLON(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COLON)
        return self.getToken(sdpParser.COLON, i)

    def getRuleIndex(self):
        return sdpParser.RULE_ipvfuture

    def enterRule(self, listener:ParseTreeListener):
        if hasattr(listener, "enterIpvfuture"):
            listener.enterIpvfuture(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitIpvfuture"):
            listener.exitIpvfuture(self)
def ipvfuture(self):
    """Parse the `ipvfuture` rule (rule index 296): a 'v'/'V', one or more
    `hexdig`s, a '.', then one or more of `unreserved` / `sub_delims` / ':'.

    NOTE(review): auto-generated ANTLR recognizer — state numbers come from
    the serialized ATN; do not hand-edit the logic.
    """
    localctx = sdpParser.IpvfutureContext(self, self._ctx, self.state)
    self.enterRule(localctx, 296, self.RULE_ipvfuture)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 1593
        _la = self._input.LA(1)
        # Match a single 'v' or 'V'; on mismatch, attempt single-token
        # inline recovery instead of consuming.
        if not(_la==sdpParser.CAP_V or _la==sdpParser.V):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
        self.state = 1595
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more loop over hexdig (do-while: body runs at least once).
        while True:
            self.state = 1594
            self.hexdig()
            self.state = 1597
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Stop when the lookahead is no longer a hex digit (bitmask test).
            if not (((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0)):
                break

        self.state = 1599
        self.match(sdpParser.PERIOD)
        self.state = 1603
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more loop over the suffix alternatives (do-while).
        while True:
            self.state = 1603
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # Dispatch on the lookahead token to one of the three alternatives.
            if token in [sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
                self.state = 1600
                self.unreserved()
                pass
            elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
                self.state = 1601
                self.sub_delims()
                pass
            elif token in [sdpParser.COLON]:
                self.state = 1602
                self.match(sdpParser.COLON)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 1605
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Stop when the lookahead leaves the suffix set (two bitmasks).
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0)):
                break

    except RecognitionException as re:
        # Standard ANTLR recovery: record on the context, report, resync.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Ipv6addressContext(ParserRuleContext):
    """Parse-tree node for the `ipv6address` rule (h16 groups, ':' separators, trailing ls32)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ls32(self):
        return self.getTypedRuleContext(sdpParser.Ls32Context, 0)

    def h16(self, i:int=None):
        # No index: all h16 children; with an index: the i-th one.
        if i is None:
            return self.getTypedRuleContexts(sdpParser.H16Context)
        return self.getTypedRuleContext(sdpParser.H16Context, i)

    def COLON(self, i:int=None):
        if i is None:
            return self.getTokens(sdpParser.COLON)
        return self.getToken(sdpParser.COLON, i)

    def getRuleIndex(self):
        return sdpParser.RULE_ipv6address

    def enterRule(self, listener:ParseTreeListener):
        if hasattr(listener, "enterIpv6address"):
            listener.enterIpv6address(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitIpv6address"):
            listener.exitIpv6address(self)
def ipv6address(self):
localctx = sdpParser.Ipv6addressContext(self, self._ctx, self.state)
self.enterRule(localctx, 298, self.RULE_ipv6address)
self._la = 0 # Token type
try:
self.state = 1917
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,214,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1607
self.h16()
self.state = 1608
self.match(sdpParser.COLON)
self.state = 1610
self.h16()
self.state = 1611
self.match(sdpParser.COLON)
self.state = 1613
self.h16()
self.state = 1614
self.match(sdpParser.COLON)
self.state = 1616
self.h16()
self.state = 1617
self.match(sdpParser.COLON)
self.state = 1619
self.h16()
self.state = 1620
self.match(sdpParser.COLON)
self.state = 1622
self.h16()
self.state = 1623
self.match(sdpParser.COLON)
self.state = 1625
self.ls32()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1627
self.match(sdpParser.COLON)
self.state = 1628
self.match(sdpParser.COLON)
self.state = 1630
self.h16()
self.state = 1631
self.match(sdpParser.COLON)
self.state = 1633
self.h16()
self.state = 1634
self.match(sdpParser.COLON)
self.state = 1636
self.h16()
self.state = 1637
self.match(sdpParser.COLON)
self.state = 1639
self.h16()
self.state = 1640
self.match(sdpParser.COLON)
self.state = 1642
self.h16()
self.state = 1643
self.match(sdpParser.COLON)
self.state = 1645
self.ls32()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1648
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1647
self.h16()
self.state = 1650
self.match(sdpParser.COLON)
self.state = 1651
self.match(sdpParser.COLON)
self.state = 1653
self.h16()
self.state = 1654
self.match(sdpParser.COLON)
self.state = 1656
self.h16()
self.state = 1657
self.match(sdpParser.COLON)
self.state = 1659
self.h16()
self.state = 1660
self.match(sdpParser.COLON)
self.state = 1662
self.h16()
self.state = 1663
self.match(sdpParser.COLON)
self.state = 1665
self.ls32()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 1673
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1670
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,197,self._ctx)
if la_ == 1:
self.state = 1667
self.h16()
self.state = 1668
self.match(sdpParser.COLON)
self.state = 1672
self.h16()
self.state = 1675
self.match(sdpParser.COLON)
self.state = 1676
self.match(sdpParser.COLON)
self.state = 1678
self.h16()
self.state = 1679
self.match(sdpParser.COLON)
self.state = 1681
self.h16()
self.state = 1682
self.match(sdpParser.COLON)
self.state = 1684
self.h16()
self.state = 1685
self.match(sdpParser.COLON)
self.state = 1687
self.ls32()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 1703
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1700
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,200,self._ctx)
if la_ == 1:
self.state = 1689
self.h16()
self.state = 1690
self.match(sdpParser.COLON)
self.state = 1692
self.h16()
self.state = 1693
self.match(sdpParser.COLON)
pass
elif la_ == 2:
self.state = 1698
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,199,self._ctx)
if la_ == 1:
self.state = 1695
self.h16()
self.state = 1696
self.match(sdpParser.COLON)
pass
self.state = 1702
self.h16()
self.state = 1705
self.match(sdpParser.COLON)
self.state = 1706
self.match(sdpParser.COLON)
self.state = 1708
self.h16()
self.state = 1709
self.match(sdpParser.COLON)
self.state = 1711
self.h16()
self.state = 1712
self.match(sdpParser.COLON)
self.state = 1714
self.ls32()
pass
elif la_ == 6:
self.enterOuterAlt(localctx, 6)
self.state = 1739
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1736
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,203,self._ctx)
if la_ == 1:
self.state = 1716
self.h16()
self.state = 1717
self.match(sdpParser.COLON)
self.state = 1719
self.h16()
self.state = 1720
self.match(sdpParser.COLON)
self.state = 1722
self.h16()
self.state = 1723
self.match(sdpParser.COLON)
pass
elif la_ == 2:
self.state = 1725
self.h16()
self.state = 1726
self.match(sdpParser.COLON)
self.state = 1728
self.h16()
self.state = 1729
self.match(sdpParser.COLON)
pass
elif la_ == 3:
self.state = 1734
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,202,self._ctx)
if la_ == 1:
self.state = 1731
self.h16()
self.state = 1732
self.match(sdpParser.COLON)
pass
self.state = 1738
self.h16()
self.state = 1741
self.match(sdpParser.COLON)
self.state = 1742
self.match(sdpParser.COLON)
self.state = 1744
self.h16()
self.state = 1745
self.match(sdpParser.COLON)
self.state = 1746
self.ls32()
pass
elif la_ == 7:
self.enterOuterAlt(localctx, 7)
self.state = 1783
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1780
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,206,self._ctx)
if la_ == 1:
self.state = 1748
self.h16()
self.state = 1749
self.match(sdpParser.COLON)
self.state = 1751
self.h16()
self.state = 1752
self.match(sdpParser.COLON)
self.state = 1754
self.h16()
self.state = 1755
self.match(sdpParser.COLON)
self.state = 1757
self.h16()
self.state = 1758
self.match(sdpParser.COLON)
pass
elif la_ == 2:
self.state = 1760
self.h16()
self.state = 1761
self.match(sdpParser.COLON)
self.state = 1763
self.h16()
self.state = 1764
self.match(sdpParser.COLON)
self.state = 1766
self.h16()
self.state = 1767
self.match(sdpParser.COLON)
pass
elif la_ == 3:
self.state = 1769
self.h16()
self.state = 1770
self.match(sdpParser.COLON)
self.state = 1772
self.h16()
self.state = 1773
self.match(sdpParser.COLON)
pass
elif la_ == 4:
self.state = 1778
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,205,self._ctx)
if la_ == 1:
self.state = 1775
self.h16()
self.state = 1776
self.match(sdpParser.COLON)
pass
self.state = 1782
self.h16()
self.state = 1785
self.match(sdpParser.COLON)
self.state = 1786
self.match(sdpParser.COLON)
self.state = 1788
self.ls32()
pass
elif la_ == 8:
self.enterOuterAlt(localctx, 8)
self.state = 1839
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1836
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,209,self._ctx)
if la_ == 1:
self.state = 1789
self.h16()
self.state = 1790
self.match(sdpParser.COLON)
self.state = 1792
self.h16()
self.state = 1793
self.match(sdpParser.COLON)
self.state = 1795
self.h16()
self.state = 1796
self.match(sdpParser.COLON)
self.state = 1798
self.h16()
self.state = 1799
self.match(sdpParser.COLON)
self.state = 1801
self.h16()
self.state = 1802
self.match(sdpParser.COLON)
pass
elif la_ == 2:
self.state = 1804
self.h16()
self.state = 1805
self.match(sdpParser.COLON)
self.state = 1807
self.h16()
self.state = 1808
self.match(sdpParser.COLON)
self.state = 1810
self.h16()
self.state = 1811
self.match(sdpParser.COLON)
self.state = 1813
self.h16()
self.state = 1814
self.match(sdpParser.COLON)
pass
elif la_ == 3:
self.state = 1816
self.h16()
self.state = 1817
self.match(sdpParser.COLON)
self.state = 1819
self.h16()
self.state = 1820
self.match(sdpParser.COLON)
self.state = 1822
self.h16()
self.state = 1823
self.match(sdpParser.COLON)
pass
elif la_ == 4:
self.state = 1825
self.h16()
self.state = 1826
self.match(sdpParser.COLON)
self.state = 1828
self.h16()
self.state = 1829
self.match(sdpParser.COLON)
pass
elif la_ == 5:
self.state = 1834
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,208,self._ctx)
if la_ == 1:
self.state = 1831
self.h16()
self.state = 1832
self.match(sdpParser.COLON)
pass
self.state = 1838
self.h16()
self.state = 1841
self.match(sdpParser.COLON)
self.state = 1842
self.match(sdpParser.COLON)
self.state = 1844
self.h16()
pass
elif la_ == 9:
self.enterOuterAlt(localctx, 9)
self.state = 1913
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1910
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,212,self._ctx)
if la_ == 1:
self.state = 1845
self.h16()
self.state = 1846
self.match(sdpParser.COLON)
self.state = 1848
self.h16()
self.state = 1849
self.match(sdpParser.COLON)
self.state = 1851
self.h16()
self.state = 1852
self.match(sdpParser.COLON)
self.state = 1854
self.h16()
self.state = 1855
self.match(sdpParser.COLON)
self.state = 1857
self.h16()
self.state = 1858
self.match(sdpParser.COLON)
self.state = 1860
self.h16()
self.state = 1861
self.match(sdpParser.COLON)
pass
elif la_ == 2:
self.state = 1863
self.h16()
self.state = 1864
self.match(sdpParser.COLON)
self.state = 1866
self.h16()
self.state = 1867
self.match(sdpParser.COLON)
self.state = 1869
self.h16()
self.state = 1870
self.match(sdpParser.COLON)
self.state = 1872
self.h16()
self.state = 1873
self.match(sdpParser.COLON)
self.state = 1875
self.h16()
self.state = 1876
self.match(sdpParser.COLON)
pass
elif la_ == 3:
self.state = 1878
self.h16()
self.state = 1879
self.match(sdpParser.COLON)
self.state = 1881
self.h16()
self.state = 1882
self.match(sdpParser.COLON)
self.state = 1884
self.h16()
self.state = 1885
self.match(sdpParser.COLON)
self.state = 1887
self.h16()
self.state = 1888
self.match(sdpParser.COLON)
pass
elif la_ == 4:
self.state = 1890
self.h16()
self.state = 1891
self.match(sdpParser.COLON)
self.state = 1893
self.h16()
self.state = 1894
self.match(sdpParser.COLON)
self.state = 1896
self.h16()
self.state = 1897
self.match(sdpParser.COLON)
pass
elif la_ == 5:
self.state = 1899
self.h16()
self.state = 1900
self.match(sdpParser.COLON)
self.state = 1902
self.h16()
self.state = 1903
self.match(sdpParser.COLON)
pass
elif la_ == 6:
self.state = 1908
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,211,self._ctx)
if la_ == 1:
self.state = 1905
self.h16()
self.state = 1906
self.match(sdpParser.COLON)
pass
self.state = 1912
self.h16()
self.state = 1915
self.match(sdpParser.COLON)
self.state = 1916
self.match(sdpParser.COLON)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class H16Context(ParserRuleContext):
    """Parse-tree context for the h16 rule (a run of hexdig children)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def hexdig(self, i:int=None):
        # i given: the i-th hexdig child; i omitted: every hexdig child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.HexdigContext, i)
        return self.getTypedRuleContexts(sdpParser.HexdigContext)

    def getRuleIndex(self):
        return sdpParser.RULE_h16

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterH16"):
            return
        listener.enterH16(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitH16"):
            return
        listener.exitH16(self)
def h16(self):
# h16 : one to four hexdig rule invocations -- a 16-bit group of an
# IPv6 address.  ANTLR-generated: the self.state numbers are ATN state
# ids and adaptive decision 216 picks how many hexdigs beyond the
# mandatory first one are consumed.  Do not reorder statements.
localctx = sdpParser.H16Context(self, self._ctx, self.state)
self.enterRule(localctx, 300, self.RULE_h16)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1919
# The first hexdig is always required.
self.hexdig()
self.state = 1930
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,216,self._ctx)
if la_ == 1:
# Three more hexdigs: a four-digit group.
self.state = 1920
self.hexdig()
self.state = 1921
self.hexdig()
self.state = 1922
self.hexdig()
pass
elif la_ == 2:
# Two more hexdigs: a three-digit group.
self.state = 1924
self.hexdig()
self.state = 1925
self.hexdig()
pass
elif la_ == 3:
# At most one more hexdig: a one- or two-digit group.  The bitmask
# below tests whether lookahead is any hex-digit token (0-9, A-F, a-f).
self.state = 1928
self._errHandler.sync(self)
_la = self._input.LA(1)
if ((((_la - 20)) & ~0x3f) == 0 and ((1 << (_la - 20)) & ((1 << (sdpParser.ZERO - 20)) | (1 << (sdpParser.ONE - 20)) | (1 << (sdpParser.TWO - 20)) | (1 << (sdpParser.THREE - 20)) | (1 << (sdpParser.FOUR - 20)) | (1 << (sdpParser.FIVE - 20)) | (1 << (sdpParser.SIX - 20)) | (1 << (sdpParser.SEVEN - 20)) | (1 << (sdpParser.EIGHT - 20)) | (1 << (sdpParser.NINE - 20)) | (1 << (sdpParser.CAP_A - 20)) | (1 << (sdpParser.CAP_B - 20)) | (1 << (sdpParser.CAP_C - 20)) | (1 << (sdpParser.CAP_D - 20)) | (1 << (sdpParser.CAP_E - 20)) | (1 << (sdpParser.CAP_F - 20)) | (1 << (sdpParser.A - 20)) | (1 << (sdpParser.B - 20)) | (1 << (sdpParser.C - 20)) | (1 << (sdpParser.D - 20)) | (1 << (sdpParser.E - 20)) | (1 << (sdpParser.F - 20)))) != 0):
self.state = 1927
self.hexdig()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Ls32Context(ParserRuleContext):
    """Parse-tree context for the ls32 rule (h16 ':' h16, or an IPv4 address)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def h16(self, i:int=None):
        # i given: the i-th h16 child; i omitted: every h16 child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.H16Context, i)
        return self.getTypedRuleContexts(sdpParser.H16Context)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def ipv4address(self):
        return self.getTypedRuleContext(sdpParser.Ipv4addressContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_ls32

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterLs32"):
            return
        listener.enterLs32(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitLs32"):
            return
        listener.exitLs32(self)
def ls32(self):
# ls32 : ( h16 COLON h16 ) | ipv4address -- the low-order 32 bits of an
# IPv6 address.  ANTLR-generated: adaptive decision 217 chooses the
# alternative; self.state numbers are ATN state ids.
localctx = sdpParser.Ls32Context(self, self._ctx, self.state)
self.enterRule(localctx, 302, self.RULE_ls32)
try:
self.state = 1937
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,217,self._ctx)
if la_ == 1:
# Alternative 1: h16 ":" h16
self.enterOuterAlt(localctx, 1)
self.state = 1932
self.h16()
self.state = 1933
self.match(sdpParser.COLON)
self.state = 1934
self.h16()
pass
elif la_ == 2:
# Alternative 2: a dotted-quad IPv4 address
self.enterOuterAlt(localctx, 2)
self.state = 1936
self.ipv4address()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Ipv4addressContext(ParserRuleContext):
    """Parse-tree context for the ipv4address rule (four dec_octets joined by periods)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def dec_octet(self, i:int=None):
        # i given: the i-th dec_octet child; i omitted: every dec_octet child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Dec_octetContext, i)
        return self.getTypedRuleContexts(sdpParser.Dec_octetContext)

    def PERIOD(self, i:int=None):
        # i given: the i-th PERIOD token; i omitted: all PERIOD tokens.
        if i is not None:
            return self.getToken(sdpParser.PERIOD, i)
        return self.getTokens(sdpParser.PERIOD)

    def getRuleIndex(self):
        return sdpParser.RULE_ipv4address

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterIpv4address"):
            return
        listener.enterIpv4address(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitIpv4address"):
            return
        listener.exitIpv4address(self)
def ipv4address(self):
    """ipv4address : dec_octet PERIOD dec_octet PERIOD dec_octet PERIOD dec_octet.

    The ATN state ids (1939..1945) advance exactly as in the generated
    straight-line code; the loop only folds the three repeated
    "dec_octet PERIOD" pairs.
    """
    localctx = sdpParser.Ipv4addressContext(self, self._ctx, self.state)
    self.enterRule(localctx, 304, self.RULE_ipv4address)
    try:
        self.enterOuterAlt(localctx, 1)
        # Three "dec_octet '.'" pairs ...
        for base_state in (1939, 1941, 1943):
            self.state = base_state
            self.dec_octet()
            self.state = base_state + 1
            self.match(sdpParser.PERIOD)
        # ... then the trailing dec_octet.
        self.state = 1945
        self.dec_octet()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dec_octetContext(ParserRuleContext):
    """Parse-tree context for the dec_octet rule (a decimal octet, 0-255)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def digit(self, i:int=None):
        # i given: the i-th digit child; i omitted: every digit child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.DigitContext, i)
        return self.getTypedRuleContexts(sdpParser.DigitContext)

    def ONE(self):
        return self.getToken(sdpParser.ONE, 0)

    def TWO(self, i:int=None):
        # TWO may occur more than once; i selects one occurrence.
        if i is not None:
            return self.getToken(sdpParser.TWO, i)
        return self.getTokens(sdpParser.TWO)

    def THREE(self):
        return self.getToken(sdpParser.THREE, 0)

    def FOUR(self):
        return self.getToken(sdpParser.FOUR, 0)

    def FIVE(self, i:int=None):
        # FIVE may occur more than once; i selects one occurrence.
        if i is not None:
            return self.getToken(sdpParser.FIVE, i)
        return self.getTokens(sdpParser.FIVE)

    def SIX(self):
        return self.getToken(sdpParser.SIX, 0)

    def SEVEN(self):
        return self.getToken(sdpParser.SEVEN, 0)

    def EIGHT(self):
        return self.getToken(sdpParser.EIGHT, 0)

    def NINE(self):
        return self.getToken(sdpParser.NINE, 0)

    def ZERO(self):
        return self.getToken(sdpParser.ZERO, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_dec_octet

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterDec_octet"):
            return
        listener.enterDec_octet(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitDec_octet"):
            return
        listener.exitDec_octet(self)
def dec_octet(self):
# dec_octet : a decimal octet in the range 0-255, split into five
# alternatives by adaptive decision 218 (ANTLR-generated; self.state
# numbers are ATN state ids -- do not reorder).
localctx = sdpParser.Dec_octetContext(self, self._ctx, self.state)
self.enterRule(localctx, 306, self.RULE_dec_octet)
self._la = 0 # Token type
try:
self.state = 1961
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,218,self._ctx)
if la_ == 1:
# Alt 1: a single digit (0-9).
self.enterOuterAlt(localctx, 1)
self.state = 1947
self.digit()
pass
elif la_ == 2:
# Alt 2: a non-zero digit (1-9) followed by a digit (10-99).
self.enterOuterAlt(localctx, 2)
self.state = 1948
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 1949
self.digit()
pass
elif la_ == 3:
# Alt 3: "1" digit digit (100-199).
self.enterOuterAlt(localctx, 3)
self.state = 1950
self.match(sdpParser.ONE)
self.state = 1951
self.digit()
self.state = 1952
self.digit()
pass
elif la_ == 4:
# Alt 4: "2" then 0-4 then a digit (200-249).
self.enterOuterAlt(localctx, 4)
self.state = 1954
self.match(sdpParser.TWO)
self.state = 1955
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 1956
self.digit()
pass
elif la_ == 5:
# Alt 5: "2" "5" then 0-5 (250-255).
self.enterOuterAlt(localctx, 5)
self.state = 1957
self.match(sdpParser.TWO)
self.state = 1958
self.match(sdpParser.FIVE)
self.state = 1960
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Reg_nameContext(ParserRuleContext):
    """Parse-tree context for the reg_name rule (registered-name host component)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def unreserved(self, i:int=None):
        # i given: the i-th unreserved child; i omitted: all of them.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.UnreservedContext, i)
        return self.getTypedRuleContexts(sdpParser.UnreservedContext)

    def pct_encoded(self, i:int=None):
        # i given: the i-th pct_encoded child; i omitted: all of them.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Pct_encodedContext, i)
        return self.getTypedRuleContexts(sdpParser.Pct_encodedContext)

    def sub_delims(self, i:int=None):
        # i given: the i-th sub_delims child; i omitted: all of them.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Sub_delimsContext, i)
        return self.getTypedRuleContexts(sdpParser.Sub_delimsContext)

    def getRuleIndex(self):
        return sdpParser.RULE_reg_name

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterReg_name"):
            return
        listener.enterReg_name(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitReg_name"):
            return
        listener.exitReg_name(self)
def reg_name(self):
# reg_name : ( unreserved | pct_encoded | sub_delims )* -- zero or more
# host-name characters.  ANTLR-generated: the while-condition bitmask
# tests whether lookahead belongs to the union of all three
# sub-alternatives; the token-list dispatch below picks which one.
localctx = sdpParser.Reg_nameContext(self, self._ctx, self.state)
self.enterRule(localctx, 308, self.RULE_reg_name)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1968
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << 
(sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
self.state = 1966
self._errHandler.sync(self)
token = self._input.LA(1)
# Dispatch on the lookahead token: letters/digits/dash/period/tilde ->
# unreserved; "%" -> pct_encoded; the remaining punctuation -> sub_delims.
if token in [sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
self.state = 1963
self.unreserved()
pass
elif token in [sdpParser.PERCENT]:
self.state = 1964
self.pct_encoded()
pass
elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
self.state = 1965
self.sub_delims()
pass
else:
raise NoViableAltException(self)
self.state = 1970
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PathContext(ParserRuleContext):
    """Parse-tree context for the path rule (one of five path alternatives)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Each accessor returns the single child of the matched alternative
    # (or None when a different alternative matched).
    def path_abempty(self):
        return self.getTypedRuleContext(sdpParser.Path_abemptyContext, 0)

    def path_absolute(self):
        return self.getTypedRuleContext(sdpParser.Path_absoluteContext, 0)

    def path_noscheme(self):
        return self.getTypedRuleContext(sdpParser.Path_noschemeContext, 0)

    def path_rootless(self):
        return self.getTypedRuleContext(sdpParser.Path_rootlessContext, 0)

    def path_empty(self):
        return self.getTypedRuleContext(sdpParser.Path_emptyContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_path

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath"):
            return
        listener.enterPath(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath"):
            return
        listener.exitPath(self)
def path(self):
    """path : path_abempty | path_absolute | path_noscheme | path_rootless | path_empty.

    Adaptive decision 221 predicts the alternative; the dispatch table
    replays exactly the (alt number, ATN state, sub-rule) triples of the
    generated if/elif chain.
    """
    localctx = sdpParser.PathContext(self, self._ctx, self.state)
    self.enterRule(localctx, 310, self.RULE_path)
    try:
        self.state = 1976
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input, 221, self._ctx)
        # alternative number -> (next ATN state, sub-rule to invoke)
        dispatch = {
            1: (1971, self.path_abempty),
            2: (1972, self.path_absolute),
            3: (1973, self.path_noscheme),
            4: (1974, self.path_rootless),
            5: (1975, self.path_empty),
        }
        if la_ in dispatch:
            self.enterOuterAlt(localctx, la_)
            next_state, sub_rule = dispatch[la_]
            self.state = next_state
            sub_rule()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Path_abemptyContext(ParserRuleContext):
    """Parse-tree context for the path_abempty rule (('/' segment)*)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def SLASH(self, i:int=None):
        # i given: the i-th SLASH token; i omitted: all SLASH tokens.
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def segment(self, i:int=None):
        # i given: the i-th segment child; i omitted: every segment child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SegmentContext, i)
        return self.getTypedRuleContexts(sdpParser.SegmentContext)

    def getRuleIndex(self):
        return sdpParser.RULE_path_abempty

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath_abempty"):
            return
        listener.enterPath_abempty(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath_abempty"):
            return
        listener.exitPath_abempty(self)
def path_abempty(self):
# path_abempty : ( SLASH segment )* -- zero or more "/segment" pairs
# (may match nothing).  ANTLR-generated; the sync/LA(1) calls drive the
# loop and must stay in this exact order.
localctx = sdpParser.Path_abemptyContext(self, self._ctx, self.state)
self.enterRule(localctx, 312, self.RULE_path_abempty)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1982
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.SLASH:
self.state = 1978
self.match(sdpParser.SLASH)
self.state = 1979
self.segment()
self.state = 1984
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Path_absoluteContext(ParserRuleContext):
    """Parse-tree context for the path_absolute rule ('/' [ segment_nz ('/' segment)* ])."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def SLASH(self, i:int=None):
        # i given: the i-th SLASH token; i omitted: all SLASH tokens.
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def segment_nz(self):
        return self.getTypedRuleContext(sdpParser.Segment_nzContext, 0)

    def segment(self, i:int=None):
        # i given: the i-th segment child; i omitted: every segment child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SegmentContext, i)
        return self.getTypedRuleContexts(sdpParser.SegmentContext)

    def getRuleIndex(self):
        return sdpParser.RULE_path_absolute

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath_absolute"):
            return
        listener.enterPath_absolute(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath_absolute"):
            return
        listener.exitPath_absolute(self)
def path_absolute(self):
# path_absolute : SLASH ( segment_nz ( SLASH segment )* )? -- a path
# beginning with "/".  ANTLR-generated: the bitmask tests whether the
# lookahead can start a segment_nz (pchar set); the inner while-loop
# consumes further "/segment" pairs.
localctx = sdpParser.Path_absoluteContext(self, self._ctx, self.state)
self.enterRule(localctx, 314, self.RULE_path_absolute)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1985
self.match(sdpParser.SLASH)
self.state = 1994
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | 
(1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
self.state = 1986
self.segment_nz()
self.state = 1991
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.SLASH:
self.state = 1987
self.match(sdpParser.SLASH)
self.state = 1988
self.segment()
self.state = 1993
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Path_noschemeContext(ParserRuleContext):
    """Parse-tree context for the path_noscheme rule (segment_nz_nc ('/' segment)*)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def segment_nz_nc(self):
        return self.getTypedRuleContext(sdpParser.Segment_nz_ncContext, 0)

    def SLASH(self, i:int=None):
        # i given: the i-th SLASH token; i omitted: all SLASH tokens.
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def segment(self, i:int=None):
        # i given: the i-th segment child; i omitted: every segment child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SegmentContext, i)
        return self.getTypedRuleContexts(sdpParser.SegmentContext)

    def getRuleIndex(self):
        return sdpParser.RULE_path_noscheme

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath_noscheme"):
            return
        listener.enterPath_noscheme(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath_noscheme"):
            return
        listener.exitPath_noscheme(self)
def path_noscheme(self):
# path_noscheme : segment_nz_nc ( SLASH segment )* -- a relative path
# whose first segment contains no colon.  ANTLR-generated; the
# sync/LA(1) calls drive the loop and must stay in this exact order.
localctx = sdpParser.Path_noschemeContext(self, self._ctx, self.state)
self.enterRule(localctx, 316, self.RULE_path_noscheme)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1996
self.segment_nz_nc()
self.state = 2001
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.SLASH:
self.state = 1997
self.match(sdpParser.SLASH)
self.state = 1998
self.segment()
self.state = 2003
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Path_rootlessContext(ParserRuleContext):
    """Parse-tree context for the path_rootless rule (segment_nz ('/' segment)*)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def segment_nz(self):
        return self.getTypedRuleContext(sdpParser.Segment_nzContext, 0)

    def SLASH(self, i:int=None):
        # i given: the i-th SLASH token; i omitted: all SLASH tokens.
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def segment(self, i:int=None):
        # i given: the i-th segment child; i omitted: every segment child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.SegmentContext, i)
        return self.getTypedRuleContexts(sdpParser.SegmentContext)

    def getRuleIndex(self):
        return sdpParser.RULE_path_rootless

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath_rootless"):
            return
        listener.enterPath_rootless(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath_rootless"):
            return
        listener.exitPath_rootless(self)
def path_rootless(self):
# path_rootless : segment_nz ( SLASH segment )* -- a path beginning
# with a non-empty segment rather than "/".  ANTLR-generated; the
# sync/LA(1) calls drive the loop and must stay in this exact order.
localctx = sdpParser.Path_rootlessContext(self, self._ctx, self.state)
self.enterRule(localctx, 318, self.RULE_path_rootless)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 2004
self.segment_nz()
self.state = 2009
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==sdpParser.SLASH:
self.state = 2005
self.match(sdpParser.SLASH)
self.state = 2006
self.segment()
self.state = 2011
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Path_emptyContext(ParserRuleContext):
    """Parse-tree context for the path_empty rule (matches zero tokens)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return sdpParser.RULE_path_empty

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterPath_empty"):
            return
        listener.enterPath_empty(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitPath_empty"):
            return
        listener.exitPath_empty(self)
def path_empty(self):
# path_empty : the empty path -- consumes no tokens.  The rule still
# enters/exits so a Path_emptyContext node appears in the parse tree.
localctx = sdpParser.Path_emptyContext(self, self._ctx, self.state)
self.enterRule(localctx, 320, self.RULE_path_empty)
try:
self.enterOuterAlt(localctx, 1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SegmentContext(ParserRuleContext):
    """Parse-tree context for the segment rule (a run of pchar children)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pchar(self, i:int=None):
        # i given: the i-th pchar child; i omitted: every pchar child.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.PcharContext, i)
        return self.getTypedRuleContexts(sdpParser.PcharContext)

    def getRuleIndex(self):
        return sdpParser.RULE_segment

    def enterRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "enterSegment"):
            return
        listener.enterSegment(self)

    def exitRule(self, listener:ParseTreeListener):
        if not hasattr(listener, "exitSegment"):
            return
        listener.exitSegment(self)
def segment(self):
    # Rule `segment` (rule index 322): pchar*  — a possibly-empty URI path
    # segment. The state numbers and bitset constants below mirror the
    # serialized ATN and must not be altered.
    localctx = sdpParser.SegmentContext(self, self._ctx, self.state)
    self.enterRule(localctx, 322, self.RULE_segment)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2017
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (...)* loop: keep matching pchar while the lookahead token is in
        # pchar's first-set. The two bitset tests cover token types 0-63
        # and 67-130 respectively (64-bit mask windows).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
            self.state = 2014
            self.pchar()
            self.state = 2019
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Segment_nzContext(ParserRuleContext):
    # Parse-tree node for the `segment_nz` rule (one or more pchar).

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pchar(self, i:int=None):
        # Every pchar child when called without an index, else the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.PcharContext, i)
        return self.getTypedRuleContexts(sdpParser.PcharContext)

    def getRuleIndex(self):
        return sdpParser.RULE_segment_nz

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterSegment_nz
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitSegment_nz
        except AttributeError:
            pass
        else:
            hook(self)
def segment_nz(self):
    # Rule `segment_nz` (rule index 324): pchar+  — a non-zero-length URI
    # path segment. State numbers and bitsets mirror the serialized ATN.
    localctx = sdpParser.Segment_nzContext(self, self._ctx, self.state)
    self.enterRule(localctx, 324, self.RULE_segment_nz)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2021
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (...)+ loop: match at least one pchar, then continue while the
        # lookahead remains in pchar's first-set (two 64-bit mask windows:
        # token types 0-63, then 67-130).
        while True:
            self.state = 2020
            self.pchar()
            self.state = 2023
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0)):
                break
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Segment_nz_ncContext(ParserRuleContext):
    # Parse-tree node for `segment_nz_nc` (non-empty segment, no colon).

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def unreserved(self, i:int=None):
        # All unreserved children without an index, else the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.UnreservedContext, i)
        return self.getTypedRuleContexts(sdpParser.UnreservedContext)

    def pct_encoded(self, i:int=None):
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Pct_encodedContext, i)
        return self.getTypedRuleContexts(sdpParser.Pct_encodedContext)

    def sub_delims(self, i:int=None):
        if i is not None:
            return self.getTypedRuleContext(sdpParser.Sub_delimsContext, i)
        return self.getTypedRuleContexts(sdpParser.Sub_delimsContext)

    def AT(self, i:int=None):
        # All AT tokens without an index, else the i-th one.
        if i is not None:
            return self.getToken(sdpParser.AT, i)
        return self.getTokens(sdpParser.AT)

    def getRuleIndex(self):
        return sdpParser.RULE_segment_nz_nc

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterSegment_nz_nc
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitSegment_nz_nc
        except AttributeError:
            pass
        else:
            hook(self)
def segment_nz_nc(self):
    # Rule `segment_nz_nc` (rule index 326):
    # ( unreserved | pct_encoded | sub_delims | '@' )+
    # — a non-empty segment with no COLON allowed (note the continuation
    # bitset below deliberately omits sdpParser.COLON). State numbers and
    # bitsets mirror the serialized ATN.
    localctx = sdpParser.Segment_nz_ncContext(self, self._ctx, self.state)
    self.enterRule(localctx, 326, self.RULE_segment_nz_nc)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2029
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (...)+ loop: one alternative per iteration, chosen by lookahead.
        while True:
            self.state = 2029
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
                self.state = 2025
                self.unreserved()
                pass
            elif token in [sdpParser.PERCENT]:
                self.state = 2026
                self.pct_encoded()
                pass
            elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
                self.state = 2027
                self.sub_delims()
                pass
            elif token in [sdpParser.AT]:
                self.state = 2028
                self.match(sdpParser.AT)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 2031
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Continue only while lookahead stays in the rule's first-set
            # (COLON intentionally absent: "nc" = no colon).
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0)):
                break
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PcharContext(ParserRuleContext):
    # Parse-tree node for `pchar`: unreserved | pct_encoded | sub_delims | ':' | '@'.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Single-child accessors (each alternative appears at most once).
    def unreserved(self):
        return self.getTypedRuleContext(sdpParser.UnreservedContext, 0)

    def pct_encoded(self):
        return self.getTypedRuleContext(sdpParser.Pct_encodedContext, 0)

    def sub_delims(self):
        return self.getTypedRuleContext(sdpParser.Sub_delimsContext, 0)

    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def AT(self):
        return self.getToken(sdpParser.AT, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_pchar

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterPchar
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitPchar
        except AttributeError:
            pass
        else:
            hook(self)
def pchar(self):
    # Rule `pchar` (rule index 328):
    # unreserved | pct_encoded | sub_delims | ':' | '@'
    # — one URI path character (RFC 3986 pchar). Alternative chosen by a
    # single token of lookahead; state numbers mirror the serialized ATN.
    localctx = sdpParser.PcharContext(self, self._ctx, self.state)
    self.enterRule(localctx, 328, self.RULE_pchar)
    try:
        self.state = 2038
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
            # Alt 1: unreserved (ALPHA / DIGIT / '-' / '.' / '_' / '~')
            self.enterOuterAlt(localctx, 1)
            self.state = 2033
            self.unreserved()
            pass
        elif token in [sdpParser.PERCENT]:
            # Alt 2: percent-encoded octet
            self.enterOuterAlt(localctx, 2)
            self.state = 2034
            self.pct_encoded()
            pass
        elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
            # Alt 3: sub-delims
            self.enterOuterAlt(localctx, 3)
            self.state = 2035
            self.sub_delims()
            pass
        elif token in [sdpParser.COLON]:
            # Alt 4: literal ':'
            self.enterOuterAlt(localctx, 4)
            self.state = 2036
            self.match(sdpParser.COLON)
            pass
        elif token in [sdpParser.AT]:
            # Alt 5: literal '@'
            self.enterOuterAlt(localctx, 5)
            self.state = 2037
            self.match(sdpParser.AT)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class QueryContext(ParserRuleContext):
    # Parse-tree node for the `query` rule: ( pchar | '/' | '?' )*.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pchar(self, i:int=None):
        # All pchar children without an index, else the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.PcharContext, i)
        return self.getTypedRuleContexts(sdpParser.PcharContext)

    def SLASH(self, i:int=None):
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def QUESTION(self, i:int=None):
        if i is not None:
            return self.getToken(sdpParser.QUESTION, i)
        return self.getTokens(sdpParser.QUESTION)

    def getRuleIndex(self):
        return sdpParser.RULE_query

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterQuery
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitQuery
        except AttributeError:
            pass
        else:
            hook(self)
def query(self):
    # Rule `query` (rule index 330): ( pchar | '/' | '?' )*  — the RFC 3986
    # query component. State numbers and bitsets mirror the serialized ATN.
    localctx = sdpParser.QueryContext(self, self._ctx, self.state)
    self.enterRule(localctx, 330, self.RULE_query)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2045
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (...)* loop: continue while lookahead is pchar / SLASH / QUESTION
        # (two 64-bit mask windows: token types 0-63, then 67-130).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
            self.state = 2043
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.EQUALS, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
                self.state = 2040
                self.pchar()
                pass
            elif token in [sdpParser.SLASH]:
                self.state = 2041
                self.match(sdpParser.SLASH)
                pass
            elif token in [sdpParser.QUESTION]:
                self.state = 2042
                self.match(sdpParser.QUESTION)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 2047
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Fragment_1Context(ParserRuleContext):
    # Parse-tree node for the `fragment_1` rule: ( pchar | '/' | '?' )*.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pchar(self, i:int=None):
        # All pchar children without an index, else the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.PcharContext, i)
        return self.getTypedRuleContexts(sdpParser.PcharContext)

    def SLASH(self, i:int=None):
        if i is not None:
            return self.getToken(sdpParser.SLASH, i)
        return self.getTokens(sdpParser.SLASH)

    def QUESTION(self, i:int=None):
        if i is not None:
            return self.getToken(sdpParser.QUESTION, i)
        return self.getTokens(sdpParser.QUESTION)

    def getRuleIndex(self):
        return sdpParser.RULE_fragment_1

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterFragment_1
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitFragment_1
        except AttributeError:
            pass
        else:
            hook(self)
def fragment_1(self):
    # Rule `fragment_1` (rule index 332): ( pchar | '/' | '?' )*  — the
    # RFC 3986 fragment component (suffixed "_1", presumably to avoid a
    # clash with a reserved word in the grammar — confirm against the .g4).
    # State numbers and bitsets mirror the serialized ATN.
    localctx = sdpParser.Fragment_1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 332, self.RULE_fragment_1)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2053
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (...)* loop: continue while lookahead is pchar / SLASH / QUESTION
        # (two 64-bit mask windows: token types 0-63, then 67-130).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.PERCENT) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.DASH) | (1 << sdpParser.PERIOD) | (1 << sdpParser.SLASH) | (1 << sdpParser.ZERO) | (1 << sdpParser.ONE) | (1 << sdpParser.TWO) | (1 << sdpParser.THREE) | (1 << sdpParser.FOUR) | (1 << sdpParser.FIVE) | (1 << sdpParser.SIX) | (1 << sdpParser.SEVEN) | (1 << sdpParser.EIGHT) | (1 << sdpParser.NINE) | (1 << sdpParser.COLON) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS) | (1 << sdpParser.QUESTION) | (1 << sdpParser.AT) | (1 << sdpParser.CAP_A) | (1 << sdpParser.CAP_B) | (1 << sdpParser.CAP_C) | (1 << sdpParser.CAP_D) | (1 << sdpParser.CAP_E) | (1 << sdpParser.CAP_F) | (1 << sdpParser.CAP_G) | (1 << sdpParser.CAP_H) | (1 << sdpParser.CAP_I) | (1 << sdpParser.CAP_J) | (1 << sdpParser.CAP_K) | (1 << sdpParser.CAP_L) | (1 << sdpParser.CAP_M) | (1 << sdpParser.CAP_N) | (1 << sdpParser.CAP_O) | (1 << sdpParser.CAP_P) | (1 << sdpParser.CAP_Q) | (1 << sdpParser.CAP_R) | (1 << sdpParser.CAP_S) | (1 << sdpParser.CAP_T) | (1 << sdpParser.CAP_U) | (1 << sdpParser.CAP_V) | (1 << sdpParser.CAP_W) | (1 << sdpParser.CAP_X) | (1 << sdpParser.CAP_Y) | (1 << sdpParser.CAP_Z))) != 0) or ((((_la - 67)) & ~0x3f) == 0 and ((1 << (_la - 67)) & ((1 << (sdpParser.UNDERSCORE - 67)) | (1 << (sdpParser.A - 67)) | (1 << (sdpParser.B - 67)) | (1 << (sdpParser.C - 67)) | (1 << (sdpParser.D - 67)) | (1 << (sdpParser.E - 67)) | (1 << (sdpParser.F - 67)) | (1 << (sdpParser.G - 67)) | (1 << (sdpParser.H - 67)) | (1 << (sdpParser.I - 67)) | (1 << (sdpParser.J - 67)) | (1 << (sdpParser.K - 67)) | (1 << (sdpParser.L - 67)) | (1 << (sdpParser.M - 67)) | (1 << (sdpParser.N - 67)) | (1 << (sdpParser.O - 67)) | (1 << (sdpParser.P - 67)) | (1 << (sdpParser.Q - 67)) | (1 << (sdpParser.R - 67)) | (1 << (sdpParser.S - 67)) | (1 << (sdpParser.T - 67)) | (1 << (sdpParser.U - 67)) | (1 << (sdpParser.V - 67)) | (1 << (sdpParser.W - 67)) | (1 << (sdpParser.X - 67)) | (1 << (sdpParser.Y - 67)) | (1 << (sdpParser.Z - 67)) | (1 << (sdpParser.TILDE - 67)))) != 0):
            self.state = 2051
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.PERCENT, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.DASH, sdpParser.PERIOD, sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE, sdpParser.COLON, sdpParser.SEMICOLON, sdpParser.EQUALS, sdpParser.AT, sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.UNDERSCORE, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z, sdpParser.TILDE]:
                self.state = 2048
                self.pchar()
                pass
            elif token in [sdpParser.SLASH]:
                self.state = 2049
                self.match(sdpParser.SLASH)
                pass
            elif token in [sdpParser.QUESTION]:
                self.state = 2050
                self.match(sdpParser.QUESTION)
                pass
            else:
                raise NoViableAltException(self)
            self.state = 2055
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Pct_encodedContext(ParserRuleContext):
    # Parse-tree node for `pct_encoded`: '%' hexdig hexdig.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def PERCENT(self):
        return self.getToken(sdpParser.PERCENT, 0)

    def hexdig(self, i:int=None):
        # Both hexdig children without an index, else the i-th one.
        if i is not None:
            return self.getTypedRuleContext(sdpParser.HexdigContext, i)
        return self.getTypedRuleContexts(sdpParser.HexdigContext)

    def getRuleIndex(self):
        return sdpParser.RULE_pct_encoded

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterPct_encoded
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitPct_encoded
        except AttributeError:
            pass
        else:
            hook(self)
def pct_encoded(self):
    # Rule `pct_encoded` (rule index 334): '%' hexdig hexdig  — one
    # percent-encoded octet (RFC 3986 pct-encoded).
    localctx = sdpParser.Pct_encodedContext(self, self._ctx, self.state)
    self.enterRule(localctx, 334, self.RULE_pct_encoded)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2056
        self.match(sdpParser.PERCENT)
        self.state = 2057
        self.hexdig()
        self.state = 2058
        self.hexdig()
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class UnreservedContext(ParserRuleContext):
    # Parse-tree node for `unreserved`: alpha | digit | '-' | '.' | '_' | '~'.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Single-child accessors (each alternative appears at most once).
    def alpha(self):
        return self.getTypedRuleContext(sdpParser.AlphaContext, 0)

    def digit(self):
        return self.getTypedRuleContext(sdpParser.DigitContext, 0)

    def DASH(self):
        return self.getToken(sdpParser.DASH, 0)

    def PERIOD(self):
        return self.getToken(sdpParser.PERIOD, 0)

    def UNDERSCORE(self):
        return self.getToken(sdpParser.UNDERSCORE, 0)

    def TILDE(self):
        return self.getToken(sdpParser.TILDE, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_unreserved

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterUnreserved
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitUnreserved
        except AttributeError:
            pass
        else:
            hook(self)
def unreserved(self):
    # Rule `unreserved` (rule index 336):
    # alpha | digit | '-' | '.' | '_' | '~'  (RFC 3986 unreserved).
    # Alternative chosen by one token of lookahead; state numbers mirror
    # the serialized ATN.
    localctx = sdpParser.UnreservedContext(self, self._ctx, self.state)
    self.enterRule(localctx, 336, self.RULE_unreserved)
    try:
        self.state = 2066
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.CAP_A, sdpParser.CAP_B, sdpParser.CAP_C, sdpParser.CAP_D, sdpParser.CAP_E, sdpParser.CAP_F, sdpParser.CAP_G, sdpParser.CAP_H, sdpParser.CAP_I, sdpParser.CAP_J, sdpParser.CAP_K, sdpParser.CAP_L, sdpParser.CAP_M, sdpParser.CAP_N, sdpParser.CAP_O, sdpParser.CAP_P, sdpParser.CAP_Q, sdpParser.CAP_R, sdpParser.CAP_S, sdpParser.CAP_T, sdpParser.CAP_U, sdpParser.CAP_V, sdpParser.CAP_W, sdpParser.CAP_X, sdpParser.CAP_Y, sdpParser.CAP_Z, sdpParser.A, sdpParser.B, sdpParser.C, sdpParser.D, sdpParser.E, sdpParser.F, sdpParser.G, sdpParser.H, sdpParser.I, sdpParser.J, sdpParser.K, sdpParser.L, sdpParser.M, sdpParser.N, sdpParser.O, sdpParser.P, sdpParser.Q, sdpParser.R, sdpParser.S, sdpParser.T, sdpParser.U, sdpParser.V, sdpParser.W, sdpParser.X, sdpParser.Y, sdpParser.Z]:
            # Alt 1: letter
            self.enterOuterAlt(localctx, 1)
            self.state = 2060
            self.alpha()
            pass
        elif token in [sdpParser.ZERO, sdpParser.ONE, sdpParser.TWO, sdpParser.THREE, sdpParser.FOUR, sdpParser.FIVE, sdpParser.SIX, sdpParser.SEVEN, sdpParser.EIGHT, sdpParser.NINE]:
            # Alt 2: digit
            self.enterOuterAlt(localctx, 2)
            self.state = 2061
            self.digit()
            pass
        elif token in [sdpParser.DASH]:
            self.enterOuterAlt(localctx, 3)
            self.state = 2062
            self.match(sdpParser.DASH)
            pass
        elif token in [sdpParser.PERIOD]:
            self.enterOuterAlt(localctx, 4)
            self.state = 2063
            self.match(sdpParser.PERIOD)
            pass
        elif token in [sdpParser.UNDERSCORE]:
            self.enterOuterAlt(localctx, 5)
            self.state = 2064
            self.match(sdpParser.UNDERSCORE)
            pass
        elif token in [sdpParser.TILDE]:
            self.enterOuterAlt(localctx, 6)
            self.state = 2065
            self.match(sdpParser.TILDE)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ReservedContext(ParserRuleContext):
    # Parse-tree node for `reserved`: gen_delims | sub_delims.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Single-child accessors (each alternative appears at most once).
    def gen_delims(self):
        return self.getTypedRuleContext(sdpParser.Gen_delimsContext, 0)

    def sub_delims(self):
        return self.getTypedRuleContext(sdpParser.Sub_delimsContext, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_reserved

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterReserved
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitReserved
        except AttributeError:
            pass
        else:
            hook(self)
def reserved(self):
    # Rule `reserved` (rule index 338): gen_delims | sub_delims
    # (RFC 3986 reserved). Alternative chosen by one token of lookahead.
    localctx = sdpParser.ReservedContext(self, self._ctx, self.state)
    self.enterRule(localctx, 338, self.RULE_reserved)
    try:
        self.state = 2070
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [sdpParser.HASH, sdpParser.SLASH, sdpParser.COLON, sdpParser.QUESTION, sdpParser.AT, sdpParser.LEFT_BRACE, sdpParser.RIGHT_BRACE]:
            # Alt 1: general delimiter
            self.enterOuterAlt(localctx, 1)
            self.state = 2068
            self.gen_delims()
            pass
        elif token in [sdpParser.EXCLAMATION, sdpParser.DOLLAR, sdpParser.AMPERSAND, sdpParser.APOSTROPHE, sdpParser.LEFT_PAREN, sdpParser.RIGHT_PAREN, sdpParser.ASTERISK, sdpParser.PLUS, sdpParser.COMMA, sdpParser.SEMICOLON, sdpParser.EQUALS]:
            # Alt 2: sub-delimiter
            self.enterOuterAlt(localctx, 2)
            self.state = 2069
            self.sub_delims()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Gen_delimsContext(ParserRuleContext):
    # Parse-tree node for `gen_delims` — one RFC 3986 general delimiter.

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # Single-token accessors, one per alternative.
    def COLON(self):
        return self.getToken(sdpParser.COLON, 0)

    def SLASH(self):
        return self.getToken(sdpParser.SLASH, 0)

    def QUESTION(self):
        return self.getToken(sdpParser.QUESTION, 0)

    def HASH(self):
        return self.getToken(sdpParser.HASH, 0)

    def LEFT_BRACE(self):
        return self.getToken(sdpParser.LEFT_BRACE, 0)

    def RIGHT_BRACE(self):
        return self.getToken(sdpParser.RIGHT_BRACE, 0)

    def AT(self):
        return self.getToken(sdpParser.AT, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_gen_delims

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch to the listener hook only if the listener implements it.
        try:
            hook = listener.enterGen_delims
        except AttributeError:
            pass
        else:
            hook(self)

    def exitRule(self, listener:ParseTreeListener):
        try:
            hook = listener.exitGen_delims
        except AttributeError:
            pass
        else:
            hook(self)
def gen_delims(self):
    """Parse the `gen_delims` rule: consume one general-delimiter token.

    ANTLR-generated method — do not edit by hand. Accepts exactly one of
    HASH, SLASH, COLON, QUESTION, AT, LEFT_BRACE or RIGHT_BRACE and
    recovers inline on anything else.
    """
    localctx = sdpParser.Gen_delimsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 340, self.RULE_gen_delims)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2072
        _la = self._input.LA(1)
        # Bit-mask set-membership test: token types are offset by 7 so
        # the whole accepted set fits in a single 64-bit mask.
        if not(((((_la - 7)) & ~0x3f) == 0 and ((1 << (_la - 7)) & ((1 << (sdpParser.HASH - 7)) | (1 << (sdpParser.SLASH - 7)) | (1 << (sdpParser.COLON - 7)) | (1 << (sdpParser.QUESTION - 7)) | (1 << (sdpParser.AT - 7)) | (1 << (sdpParser.LEFT_BRACE - 7)) | (1 << (sdpParser.RIGHT_BRACE - 7)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Sub_delimsContext(ParserRuleContext):
    """Parse-tree context for the `sub_delims` rule (ANTLR-generated).

    Each token accessor returns the matched terminal for that token
    type, as produced by `ParserRuleContext.getToken`.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def EXCLAMATION(self):
        return self.getToken(sdpParser.EXCLAMATION, 0)

    def DOLLAR(self):
        return self.getToken(sdpParser.DOLLAR, 0)

    def AMPERSAND(self):
        return self.getToken(sdpParser.AMPERSAND, 0)

    def APOSTROPHE(self):
        return self.getToken(sdpParser.APOSTROPHE, 0)

    def LEFT_PAREN(self):
        return self.getToken(sdpParser.LEFT_PAREN, 0)

    def RIGHT_PAREN(self):
        return self.getToken(sdpParser.RIGHT_PAREN, 0)

    def ASTERISK(self):
        return self.getToken(sdpParser.ASTERISK, 0)

    def PLUS(self):
        return self.getToken(sdpParser.PLUS, 0)

    def COMMA(self):
        return self.getToken(sdpParser.COMMA, 0)

    def SEMICOLON(self):
        return self.getToken(sdpParser.SEMICOLON, 0)

    def EQUALS(self):
        return self.getToken(sdpParser.EQUALS, 0)

    def getRuleIndex(self):
        return sdpParser.RULE_sub_delims

    def enterRule(self, listener:ParseTreeListener):
        # Dispatch only if the listener implements the hook.
        if hasattr( listener, "enterSub_delims" ):
            listener.enterSub_delims(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitSub_delims" ):
            listener.exitSub_delims(self)
def sub_delims(self):
    """Parse the `sub_delims` rule: consume one sub-delimiter token.

    ANTLR-generated method — do not edit by hand. Accepts exactly one of
    EXCLAMATION, DOLLAR, AMPERSAND, APOSTROPHE, LEFT_PAREN, RIGHT_PAREN,
    ASTERISK, PLUS, COMMA, SEMICOLON or EQUALS, recovering inline on
    anything else.
    """
    localctx = sdpParser.Sub_delimsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 342, self.RULE_sub_delims)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 2074
        _la = self._input.LA(1)
        # Bit-mask set-membership test over the accepted token types;
        # all of them fit in the low 64 bits, so no offset is needed here.
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << sdpParser.EXCLAMATION) | (1 << sdpParser.DOLLAR) | (1 << sdpParser.AMPERSAND) | (1 << sdpParser.APOSTROPHE) | (1 << sdpParser.LEFT_PAREN) | (1 << sdpParser.RIGHT_PAREN) | (1 << sdpParser.ASTERISK) | (1 << sdpParser.PLUS) | (1 << sdpParser.COMMA) | (1 << sdpParser.SEMICOLON) | (1 << sdpParser.EQUALS))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
| 42.223196
| 8,656
| 0.565411
| 111,008
| 886,096
| 4.393764
| 0.041898
| 0.093758
| 0.067646
| 0.101469
| 0.809037
| 0.787179
| 0.744486
| 0.73258
| 0.712035
| 0.676364
| 0
| 0.107819
| 0.295189
| 886,096
| 20,985
| 8,657
| 42.225208
| 0.673157
| 0.001298
| 0
| 0.679893
| 1
| 0.071135
| 0.080851
| 0.069533
| 0
| 0
| 0.000683
| 0
| 0
| 1
| 0.188551
| false
| 0.018438
| 0.000311
| 0.110315
| 0.4184
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
b8449076f33968461dacc13c448d0e534b33d06e
| 7,533
|
py
|
Python
|
tests/flow/test_flow.py
|
xiongma/bert2tf
|
105fd1524edb703bf68aec8fde289de5923e1f78
|
[
"Apache-2.0"
] | 7
|
2021-08-05T16:35:08.000Z
|
2022-01-04T03:26:10.000Z
|
tests/flow/test_flow.py
|
xiongma/bert2tf
|
105fd1524edb703bf68aec8fde289de5923e1f78
|
[
"Apache-2.0"
] | 96
|
2021-08-06T08:32:09.000Z
|
2022-01-21T11:07:25.000Z
|
tests/flow/test_flow.py
|
xiongma/bert2tf
|
105fd1524edb703bf68aec8fde289de5923e1f78
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from bert2tf import Flow
from tests import Bert2TFTestCase
class MyTestCase(Bert2TFTestCase):
    """Flow construction and lifecycle tests for bert2tf.

    Tests decorated with ``@unittest.skip('just run on local machine')``
    require local model resources (the roberta_wwm_ext vocab and
    checkpoint files) and in some cases GPUs, so they are disabled by
    default.
    """

    def test_create_flow(self):
        # Building an empty Flow and a one-executor Flow must not raise.
        Flow()
        Flow().add(use='BaseExecutor', name='a1', replicas=1, timeout_ready=600000)

    def test_build_flow_with_customize(self):
        # Thread-backed executor with several replicas; entering the
        # context manager starts the flow, exiting tears it down.
        flow = (Flow()
                .add(use='BaseExecutor',
                     name='a1',
                     replicas=10,
                     runtime_backend='thread',
                     timeout_ready=600000))
        with flow:
            pass

    def test_flow_with_rest_gateway(self):
        # Same lifecycle check, but with the REST gateway enabled.
        flow = (Flow(rest_api=True)
                .add(use='BaseExecutor',
                     name='a1',
                     replicas=1,
                     runtime_backend='thread',
                     timeout_ready=600000))
        with flow:
            pass

    @unittest.skip('just run on local machine')
    def test_build_flow_from_yaml(self):
        # A flow topology can also be declared in a YAML file.
        with Flow.load_config('yaml/flow.yml'):
            pass

    @unittest.skip('just run on local machine')
    def test_grpc_request_with_tokenizer(self):
        flow = Flow().add(use='BertTokenizer',
                          use_with={'vocab_file_path': '../resources/pre_models/roberta_wwm_ext/vocab.txt'},
                          name='tokenizer')
        with flow:
            # Flat list of raw texts.
            result = flow.predict(['aaaa', '第二十八次集体学习'])
            self.assertEqual(isinstance(result, list), True)
            self.assertEqual(isinstance(result[0], list), True)
            self.assertEqual(len(result[0][0]), 4)
            # Pre-batched input (list of lists) must yield the same shape.
            result = flow.predict([['aaaa'], ['第二十八次集体学习']])
            self.assertEqual(isinstance(result, list), True)
            self.assertEqual(isinstance(result[0], list), True)
            self.assertEqual(len(result[0][0]), 4)

    @unittest.skip('just run on local machine')
    def test_grpc_request_with_model(self):
        flow = (Flow()
                .add(use='Bert',
                     use_with={'config': '../resources/pre_models/roberta_wwm_ext/bert_config.json',
                               'pretrained_weights_path': '../resources/pre_models/roberta_wwm_ext/bert_model.ckpt'},
                     name='bert',
                     on_gpu=True,
                     timeout_ready=1000000))
        with flow:
            # Two identical pre-tokenized id sequences of length 11
            # with full attention masks.
            input_ids = [[101, 5018, 753, 1282, 1061, 3613, 7415, 860, 2110, 739, 102],
                         [101, 5018, 753, 1282, 1061, 3613, 7415, 860, 2110, 739, 102]]
            attention_masks = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                               [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
            result = flow.predict([input_ids, attention_masks])
            # Expect per-position 768-dimensional outputs for each of
            # the 11 sequence positions.
            self.assertEqual(isinstance(result, list), True)
            self.assertEqual(len(result), 2)
            self.assertEqual(isinstance(result[0], list), True)
            self.assertEqual(len(result[0][0]), 1)
            self.assertEqual(len(result[0][0][0]), 11)
            self.assertEqual(len(result[0][0][0][0]), 768)

    @unittest.skip('just run on local machine')
    def test_grpc_request_with_full_flow(self):
        # Tokenizer feeding into the Bert encoder, end to end over gRPC.
        flow = (Flow()
                .add(use='BertTokenizer',
                     use_with={'vocab_file_path': '../resources/pre_models/roberta_wwm_ext/vocab.txt'},
                     name='tokenizer')
                .add(use='Bert',
                     use_with={'config': '../resources/pre_models/roberta_wwm_ext/bert_config.json',
                               'pretrained_weights_path': '../resources/pre_models/roberta_wwm_ext/bert_model.ckpt'},
                     name='bert',
                     replicas=1,
                     on_gpu=True,
                     device_map=[0, 2, 3],
                     timeout_ready=1000000)
                )
        with flow:
            result = flow.predict(['aaaa', '第二十八次集体学习'])
            self.assertEqual(isinstance(result, list), True)
            self.assertEqual(len(result), 2)
            self.assertEqual(isinstance(result[0], list), True)
            # First input tokenizes to 4 positions, second to 11;
            # both produce 768-dimensional vectors per position.
            self.assertEqual(len(result[0][0]), 1)
            self.assertEqual(len(result[0][0][0]), 4)
            self.assertEqual(len(result[0][0][0][0]), 768)
            self.assertEqual(len(result[1][0]), 1)
            self.assertEqual(len(result[1][0][0]), 11)
            self.assertEqual(len(result[1][0][0][0]), 768)

    @unittest.skip('just run on local machine')
    def test_flow_rest_request_predict(self):
        # Same topology as the full-flow test, but queried through the
        # REST gateway instead of flow.predict().
        flow = (Flow(rest_api=True)
                .add(use='BertTokenizer',
                     use_with={'vocab_file_path': '../resources/pre_models/roberta_wwm_ext/vocab.txt'},
                     name='tokenizer',
                     runtime_backend='thread',
                     timeout_ready=1000000)
                .add(use='Bert',
                     use_with={'config': '../resources/pre_models/roberta_wwm_ext/bert_config.json',
                               'pretrained_weights_path': '../resources/pre_models/roberta_wwm_ext/bert_model.ckpt'},
                     name='bert',
                     replicas=1,
                     on_gpu=True,
                     device_map=[0, 2, 3],
                     timeout_ready=1000000)
                )
        with flow:
            import requests
            # POST directly to the gateway's webapi endpoint and unpack
            # the JSON payload.
            result = \
                requests.post('http://0.0.0.0:5000/webapi', json={'inputs': [['第二十八次集体学习'], ['第二十八次集体学习']]}).json()[
                    'outputs']
            self.assertEqual(isinstance(result, list), True)
            self.assertEqual(len(result), 2)
            self.assertEqual(isinstance(result[0], list), True)
            self.assertEqual(len(result[0][0]), 1)
            self.assertEqual(len(result[0][0][0]), 4)
            self.assertEqual(len(result[0][0][0][0]), 768)
            self.assertEqual(len(result[1][0]), 1)
            self.assertEqual(len(result[1][0][0]), 11)
            self.assertEqual(len(result[1][0][0][0]), 768)

    def test_close_flow(self):
        # Explicit build()/close() must work for both gRPC and REST flows.
        flow = (Flow().add(use='BaseExecutor',
                           name='a1',
                           runtime_backend='thread',
                           replicas=10,
                           timeout_ready=600000))
        flow.build()
        flow.close()
        flow = Flow(rest_api=True).add(use='BaseExecutor',
                                       name='a1',
                                       runtime_backend='thread',
                                       replicas=10,
                                       timeout_ready=600000)
        flow.build()
        flow.close()

    def test_build_complex_flow(self):
        # Non-linear topology: a3 joins a1 and a2 (a2 branches straight
        # off the gateway); a4 and a5 follow sequentially.
        flow = (Flow()
                .add(use='BaseExecutor',
                     name='a1',
                     runtime_backend='thread',
                     replicas=5)
                .add(use='BaseExecutor',
                     name='a2',
                     replicas=5,
                     runtime_backend='thread',
                     needs='gateway')
                .add(use='BaseExecutor',
                     name='a3',
                     replicas=5,
                     runtime_backend='thread',
                     needs=['a1', 'a2'])
                .add(use='BaseExecutor',
                     name='a4',
                     runtime_backend='thread',
                     replicas=5)
                .add(use='BaseExecutor',
                     name='a5',
                     runtime_backend='thread',
                     replicas=5)
                )
        with flow:
            pass
| 40.069149
| 117
| 0.49157
| 774
| 7,533
| 4.630491
| 0.147287
| 0.125558
| 0.100446
| 0.133929
| 0.847935
| 0.820033
| 0.799944
| 0.796596
| 0.789063
| 0.708984
| 0
| 0.06166
| 0.377804
| 7,533
| 187
| 118
| 40.283422
| 0.703008
| 0
| 0
| 0.736196
| 0
| 0
| 0.152263
| 0.072879
| 0
| 0
| 0
| 0
| 0.184049
| 1
| 0.06135
| false
| 0.02454
| 0.02454
| 0
| 0.092025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b846f6b6b3ab31f3d7ae643f6fbc4b73f46c21a6
| 10,373
|
py
|
Python
|
upsampling/data_reuse.py
|
icolbert/upsampling
|
9c16ac54897376f24af76c544600bf8c0dac3e71
|
[
"MIT"
] | 4
|
2021-07-22T02:37:07.000Z
|
2022-03-27T15:16:33.000Z
|
upsampling/data_reuse.py
|
icolbert/upsampling
|
9c16ac54897376f24af76c544600bf8c0dac3e71
|
[
"MIT"
] | null | null | null |
upsampling/data_reuse.py
|
icolbert/upsampling
|
9c16ac54897376f24af76c544600bf8c0dac3e71
|
[
"MIT"
] | null | null | null |
import numpy as np
def ceil(x:int, y:int) -> int:
    """Return the ceiling of x / y using exact integer arithmetic.

    Args:
        x: numerator (integer).
        y: denominator (non-zero integer).

    Returns:
        ceil(x / y) as an int.

    The previous implementation, int(np.ceil(x / y)), routed the result
    through a 53-bit float, which silently returns wrong answers for
    large integers. The identity ceil(x / y) == -((-x) // y) holds for
    Python's floor division with any non-zero integer y, and is exact.
    """
    return -(-x // y)
def sub_pixel_convolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, return_total:bool = True, width:int = None):
    """Data-reuse requirements of a sub-pixel convolution upsampling layer.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        return_total: when True (default), fold the pixel-shuffle term P
            into the activation count and return (M, W, A + P); otherwise
            return (M, W, A, P) separately.
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A + P) or (M, W, A, P): compute, weight, activation and
        pixel-shuffle post-processing requirements.
    """
    if width is None:
        width = height
    r2 = upsampling_factor ** 2          # squared upsampling factor
    pixels = height * width              # spatial size of the input
    taps = kernel_size ** 2 * in_channels ** 2
    M = r2 * taps * pixels
    W = r2 * taps
    A = (1 + r2) * pixels * in_channels
    P = 2 * r2 * pixels * in_channels    # pixel-shuffle post-processing
    return (M, W, A, P) if not return_total else (M, W, A + P)
def NN_resize_convolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, return_total:bool = True, width:int = None):
    """Data-reuse requirements of a nearest-neighbour resize + convolution layer.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        return_total: when True (default), fold the resize term P into
            the activation count and return (M, W, A + P); otherwise
            return (M, W, A, P) separately.
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A + P) or (M, W, A, P): compute, weight, activation and
        nearest-neighbour interpolation pre-processing requirements.
    """
    if width is None:
        width = height
    r2 = upsampling_factor ** 2
    pixels = height * width
    W = kernel_size ** 2 * in_channels ** 2
    M = r2 * W * pixels                  # conv runs at the upsampled resolution
    A = 2 * r2 * pixels * in_channels
    P = (1 + r2) * pixels * in_channels  # NN interpolation pre-processing
    return (M, W, A, P) if not return_total else (M, W, A + P)
def standard_deconvolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, original_operator="D-SP", width:int = None):
    """Data-reuse requirements of a standard deconvolution layer.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        original_operator: which operator the deconvolution replaces,
            "D-SP" (sub-pixel) or "D-NN" (nearest-neighbour resize).
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A): compute, weight and activation requirements.

    Raises:
        NotImplementedError: for an unrecognised `original_operator`.
    """
    if width is None:
        width = height
    pixels = height * width
    c2 = in_channels ** 2
    r2 = upsampling_factor ** 2
    if original_operator == "D-SP":
        # Equivalent deconv kernel: Kd = Kc * r, stride = r, padding = r.
        W = r2 * kernel_size ** 2 * c2
    elif original_operator == "D-NN":
        # Equivalent deconv kernel: Kd = r + Kc - 1, stride = r, padding = 1.
        W = (upsampling_factor + kernel_size - 1) ** 2 * c2
    else:
        raise NotImplementedError(f"{original_operator} is not yet supported.")
    return W * pixels, W, (1 + r2) * pixels * in_channels
def fractionally_strided_deconvolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, original_operator="D-SP", width:int = None):
    """Data-reuse requirements of a fractionally-strided deconvolution layer.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        original_operator: which operator the deconvolution replaces,
            "D-SP" (sub-pixel) or "D-NN" (nearest-neighbour resize).
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A): compute, weight and activation requirements.

    Raises:
        NotImplementedError: for an unrecognised `original_operator`.
    """
    if width is None:
        width = height
    r = upsampling_factor
    pixels = height * width
    c2 = in_channels ** 2
    # Side length of the zero-inserted input.
    # NOTE(review): uses `height` for both axes even when width != height
    # — confirm this matches the paper's intent.
    expanded = height + (height - 1) * (r - 1)
    A = (expanded ** 2 + r ** 2 * pixels) * in_channels
    if original_operator == "D-SP":
        # Equivalent deconv kernel: Kd = Kc * r, stride = r, padding = r.
        M = r ** 4 * kernel_size ** 2 * c2 * pixels
        W = r ** 2 * kernel_size ** 2 * c2
    elif original_operator == "D-NN":
        # Equivalent deconv kernel: Kd = r + Kc - 1, stride = r, padding = 1.
        kd2 = (r + kernel_size - 1) ** 2
        M = kd2 * r ** 2 * pixels * c2
        W = kd2 * c2
    else:
        raise NotImplementedError(f"{original_operator} is not yet supported.")
    return M, W, A
def reverse_looping_deconvolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, original_operator="D-SP", width:int = None):
    """Data-reuse requirements of a reverse-looping deconvolution layer.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        original_operator: which operator the deconvolution replaces,
            "D-SP" (sub-pixel) or "D-NN" (nearest-neighbour resize).
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A): compute, weight and activation requirements.

    Raises:
        NotImplementedError: for an unrecognised `original_operator`.
    """
    if width is None:
        width = height
    pixels = height * width
    r2 = upsampling_factor ** 2
    c2 = in_channels ** 2
    if original_operator == "D-SP":
        # Equivalent deconv kernel: Kd = Kc * r, stride = r, padding = r.
        W = r2 * kernel_size ** 2 * c2
    elif original_operator == "D-NN":
        # Equivalent deconv kernel: Kd = r + Kc - 1, stride = r, padding = 1.
        W = (upsampling_factor + kernel_size - 1) ** 2 * c2
    else:
        raise NotImplementedError(f"{original_operator} is not yet supported.")
    return W * pixels, W, (1 + r2) * pixels * in_channels
def reverse_looping_deconvolution_2_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, original_operator="D-SP", width:int = None):
    """Data-reuse requirements of the second reverse-looping deconvolution variant.

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        original_operator: which operator the deconvolution replaces,
            "D-SP" (sub-pixel) or "D-NN" (nearest-neighbour resize).
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A): compute, weight and activation requirements.

    Raises:
        NotImplementedError: for an unrecognised `original_operator`.
    """
    if width is None:
        width = height
    r = upsampling_factor
    pixels = height * width
    c2 = in_channels ** 2
    A = (1 + r ** 2) * pixels * in_channels
    if original_operator == "D-SP":
        # Equivalent deconv kernel: Kd = Kc * r, stride = r, padding = r.
        W = r ** 2 * kernel_size ** 2 * c2
        M = W * pixels
    elif original_operator == "D-NN":
        # Equivalent deconv kernel: Kd = r + Kc - 1, stride = r, padding = 1.
        kd = r + kernel_size - 1
        # Compute counts ceil(Kd / r) taps per axis for each of the r^2 phases.
        M = ceil(kd, r) ** 2 * c2 * pixels * r ** 2
        W = kd ** 2 * c2
    else:
        raise NotImplementedError(f"{original_operator} is not yet supported.")
    return M, W, A
def transforming_deconvolution_to_convolution_data_reuse_patterns(upsampling_factor, height, in_channels, kernel_size, original_operator="D-SP", width:int = None):
    """Data-reuse requirements of the deconvolution-to-convolution transform (TDC).

    Args:
        upsampling_factor: integer upsampling factor.
        height: input image height.
        in_channels: number of input channels.
        kernel_size: convolution kernel size (square kernel assumed).
        original_operator: which operator the deconvolution replaces,
            "D-SP" (sub-pixel) or "D-NN" (nearest-neighbour resize).
        width: input image width; defaults to `height` (square input).

    Returns:
        (M, W, A): compute, weight and activation requirements.

    Raises:
        NotImplementedError: for an unrecognised `original_operator`.
    """
    if width is None:
        width = height
    r = upsampling_factor
    pixels = height * width
    c2 = in_channels ** 2
    if original_operator == "D-SP":
        # Equivalent deconv kernel: Kd = Kc * r, stride = r, padding = r.
        W = r ** 2 * kernel_size ** 2 * c2
    elif original_operator == "D-NN":
        # Equivalent deconv kernel: Kd = r + Kc - 1, stride = r, padding = 1.
        # The transform stores ceil(Kd / r)^2 taps for each of the r^2 phases.
        W = ceil(r + kernel_size - 1, r) ** 2 * r ** 2 * c2
    else:
        raise NotImplementedError(f"{original_operator} is not yet supported.")
    A = (1 + r ** 2) * pixels * in_channels
    return W * pixels, W, A
| 49.631579
| 163
| 0.654584
| 1,445
| 10,373
| 4.532872
| 0.058131
| 0.224733
| 0.110229
| 0.091603
| 0.956336
| 0.956336
| 0.956336
| 0.956336
| 0.952061
| 0.922137
| 0
| 0.015072
| 0.245252
| 10,373
| 209
| 164
| 49.631579
| 0.821561
| 0.359009
| 0
| 0.791209
| 0
| 0
| 0.042245
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087912
| false
| 0
| 0.010989
| 0.010989
| 0.263736
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8a00c01a141e3b9277595ea84c1048d5c98ec98
| 14,664
|
py
|
Python
|
accessories.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
accessories.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
accessories.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
import colors
# Short aliases for palette values from the project's colors module; the
# 34x34 pixel-art grids below are built entirely from these names.
xx = colors.xx
ol = colors.ol
# NOTE(review): every other alias matches its attribute name (xx, ol, gr, ye,
# r1, r2, r3), but "wh" is bound to colors.ey — possibly a typo for colors.wh;
# confirm against colors.py before changing.
wh = colors.ey
gr = colors.gr
ye = colors.ye
r1 = colors.r1
r2 = colors.r2
r3 = colors.r3
# Sunglasses accessory sprite: a 34x34 grid that is background (xx) everywhere
# except the frame pixels, which use the ol palette alias.
sunglasses = [[xx] * 34 for _row in range(34)]
# Top bar of the frame: row 9, columns 11-25.
sunglasses[9][11:26] = [ol] * 15
# The two lenses: rows 10-12, columns 13-16 and 19-22.
for _row in range(10, 13):
    sunglasses[_row][13:17] = [ol] * 4
    sunglasses[_row][19:23] = [ol] * 4
del _row  # keep the module namespace as clean as the original literal left it
# Bird accessory sprite: a 34x34 grid, background (xx) everywhere except a
# small bird drawn in the upper-left corner from the wh/ol/gr/ye palette
# aliases. Each entry below is (row, column, pixel value).
bird = [[xx] * 34 for _unused in range(34)]
for _r, _c, _px in (
    (0, 5, wh), (0, 6, wh),
    (1, 4, wh), (1, 5, ol), (1, 6, wh), (1, 7, gr),
    (2, 3, wh), (2, 4, wh), (2, 5, wh), (2, 6, wh), (2, 7, ye), (2, 8, ye),
    (3, 2, wh), (3, 3, gr), (3, 4, wh),
    (4, 1, wh), (4, 2, gr), (4, 3, ye),
    (5, 0, wh), (5, 1, wh), (5, 4, ye),
    (6, 5, ye),
):
    bird[_r][_c] = _px
del _unused, _r, _c, _px  # no loop temporaries left behind in the module
# Headphones accessory sprite: a 34x34 grid, background (xx) everywhere except
# the head band (rows 4-5) and the two ear pads (rows 6-8), drawn with the
# r1/r2/r3 palette aliases.
headphones = [[xx] * 34 for _unused in range(34)]
del _unused
# Head band across the top: row 4, columns 13-21, then the band sides on row 5.
headphones[4][13:22] = [r1, r3, r3, r2, r2, r2, r2, r2, r2]
headphones[5][12] = r2
headphones[5][22] = r2
# Ear pads: three pixels per side per row; the right pad mirrors the left.
headphones[6][9:12] = [r1, r2, r2]
headphones[6][23:26] = [r2, r2, r1]
headphones[7][9:12] = [r2, r3, r2]
headphones[7][23:26] = [r2, r3, r2]
headphones[8][9:12] = [r3, r2, r2]
headphones[8][23:26] = [r3, r2, r2]
| 120.196721
| 141
| 0.480906
| 3,497
| 14,664
| 2.016586
| 0.004003
| 1.900737
| 2.828134
| 3.744753
| 0.980715
| 0.97958
| 0.97958
| 0.97958
| 0.97958
| 0.977879
| 0
| 0.003257
| 0.267185
| 14,664
| 122
| 142
| 120.196721
| 0.652987
| 0
| 0
| 0.760684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008547
| 0
| 0.008547
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
b8a32229f9c9502e684a21620aded91f4c3fdc75
| 2,588
|
py
|
Python
|
classy/migrations/0023_auto_20180306_1103.py
|
Krocodial/DSC
|
91063b06b536e732e655ce7f1ad0b7c2caa61e0d
|
[
"Apache-2.0"
] | null | null | null |
classy/migrations/0023_auto_20180306_1103.py
|
Krocodial/DSC
|
91063b06b536e732e655ce7f1ad0b7c2caa61e0d
|
[
"Apache-2.0"
] | null | null | null |
classy/migrations/0023_auto_20180306_1103.py
|
Krocodial/DSC
|
91063b06b536e732e655ce7f1ad0b7c2caa61e0d
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0.1 on 2018-03-06 19:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make the listed CharField columns nullable (null=True).

    Every operation in this migration has the same shape — an AlterField that
    swaps a CharField for the same CharField with null=True — so the
    operations list is generated from a (model_name, field_name, max_length)
    table instead of thirteen hand-written AlterField blocks.
    """

    dependencies = [
        ('classy', '0022_auto_20180306_1100'),
    ]

    operations = [
        migrations.AlterField(
            model_name=model_name,
            name=field_name,
            field=models.CharField(max_length=max_length, null=True),
        )
        # One row per altered column, in the original operation order.
        for model_name, field_name, max_length in [
            ('classification', 'column_name', 50),
            ('classification', 'created_by', 50),
            ('classification', 'datasource_description', 200),
            ('classification', 'schema', 50),
            ('classification', 'table_name', 50),
            ('classification_logs', 'state', 15),
            ('classification_logs', 'user_id', 100),
            ('classification_review', 'classification_name', 50),
            ('classification_review', 'column_name', 50),
            ('classification_review', 'datasource_description', 100),
            ('classification_review', 'schema', 50),
            ('classification_review', 'table_name', 50),
            ('classification_review', 'user', 50),
        ]
    ]
| 32.759494
| 62
| 0.57728
| 241
| 2,588
| 6.008299
| 0.207469
| 0.174033
| 0.224448
| 0.260359
| 0.867403
| 0.847376
| 0.81768
| 0.81768
| 0.706492
| 0.660221
| 0
| 0.033708
| 0.31221
| 2,588
| 78
| 63
| 33.179487
| 0.779775
| 0.017388
| 0
| 0.805556
| 1
| 0
| 0.15978
| 0.075954
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013889
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b8a8d95d5f169a6dc3b50749f94572862b12fafd
| 29,261
|
py
|
Python
|
Core/Support/Mailer.py
|
Lucksi/Titanium
|
c2e3c6f2d1107ca94d5f280713f86d052024cf4a
|
[
"Apache-2.0"
] | 16
|
2021-04-17T08:32:50.000Z
|
2022-02-20T21:22:20.000Z
|
Core/Support/Mailer.py
|
Lucksi/Titanium
|
c2e3c6f2d1107ca94d5f280713f86d052024cf4a
|
[
"Apache-2.0"
] | null | null | null |
Core/Support/Mailer.py
|
Lucksi/Titanium
|
c2e3c6f2d1107ca94d5f280713f86d052024cf4a
|
[
"Apache-2.0"
] | 3
|
2021-11-09T12:48:30.000Z
|
2022-02-27T23:11:27.000Z
|
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from Core.Support import Font
from time import sleep
import os
class Sender:
@staticmethod
def instagram_template(email, password, vict_email, username, links):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "GENERATING TEMPLATE....")
f = open("Phishing/Template/template.html", "w+")
f.write("""
<html>
<body>
<div style="margin:0;padding:0" dir="ltr" bgcolor="#ffffff">
<table border="0" cellspacing="0" cellpadding="0" align="center" id="m_-6771075845464280513email_table" style="border-collapse:collapse">
<tbody>
<tr>
<td id="m_-6771075845464280513email_content" style="font-family:Helvetica Neue,Helvetica,Lucida Grande,tahoma,verdana,arial,sans-serif;background:#ffffff">
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td height="20" style="line-height:20px" colspan="3"> </td>
</tr>
<tr>
<td height="1" colspan="3" style="line-height:1px"></td>
</tr>
<tr>
<td>
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse;border:solid 1px white;margin:0 auto 5px auto;padding:3px 0;text-align:center;width:430px">
<tbody>
<tr>
<td width="15px" style="width:15px">
</td>
<td style="line-height:0px;width:400px;padding:0 0 15px 0">
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td style="width:100%;text-align:left;height:33px">
<img height="33" src="https://ci4.googleusercontent.com/proxy/H-WMBt20rSRWwIK0YLwC8Uyi1mnXWEEEiUT0twBA78N4_Rbri9VuqAL_Azd6xVjLkSiTQ6r1RjyDJ2Hx1O_3UqLo4H_LxG1o8LHL4yDfZw=s0-d-e1-ft#https://static.xx.fbcdn.net/rsrc.php/v3/yb/r/QTa-gpOyYBa.png" style="border:0" class="CToWUd">
</td>
</tr>
</tbody>
</table>
</td>
<td width="15px" style="width:15px">
</td>
</tr>
<tr>
<td width="15px" style="width:15px">
</td>
<td style="border-top:solid 1px #dbdbdb">
</td>
<td width="15px" style="width:15px">
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td>
<table border="0" width="430" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto">
<tbody>
<tr>
<td>
<table border="0" width="430px" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto;width:430px">
<tbody>
<tr>
<td width="15" style="display:block;width:15px"> </td>
</tr>
<tr>
<td>
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td>
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td width="20" style="display:block;width:20px"> </td>
<td>
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td>
</td>
</tr>
<tr>
<td>
<p style="margin:10px 0 10px 0;color:#565a5c;font-size:18px">Hi """ + username + """</p><p style="margin:10px 0 10px 0;color:#565a5c;font-size:18px">We noticed some problems with your account.</p>
</td></tr>
<tr></tr>
<tr>
<td height="10" style="line-height:10px" colspan="1"> </td></tr>
<tr>
<td>
<table border="0" width="390" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<tr>
<td>
<p style="margin:1px 0 10px 0;color:#565a5c;font-size:18px">If you ignore this message, we will remove your account immidiately for the safety of our service.
</p></td></tr></tbody></table></td><td width="20" style="display:block;width:20px"> </td></tr>
</tbody>
<td style="border-collapse:collapse;border-radius:3px;text-align:center;display:block;border:solid 1px #009fdf;padding:10px 16px 14px 16px;margin:0 2px 0 auto;min-width:80px;background-color:#47a2ea"><a href=""" + '' + links + ''""" style="color:#3b5998;text-decoration:none;display:block" target="_blank" data-saferedirecturl=""><center><font size="3"><span style="font-family:Helvetica Neue,Helvetica,Roboto,Arial,sans-serif;white-space:nowrap;font-weight:bold;vertical-align:middle;color:#fdfdfd;font-size:16px;line-height:16px">Check Activity
</span></font></center></a></td></tr></tbody></table></a></td></tr>
<tr><td height="10" style="line-height:10px" colspan="1">
</td>
</tr>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td></tr><tr><td>
<table border="0" width="430px" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto;width:430px"><tbody><tr><td height="5" style="line-height:5px" colspan="3"> </td></tr><tr><td width="20" style="display:block;width:20px"> </td><td><div style="padding-top:10px"><img src="https://ci4.googleusercontent.com/proxy/1jVmGWy9tCnCqBWLSinJ6Z8m-mANhlu-0HJJpn3x1Rf1YzMg3CCnm8YzpKQh29yaES9XHM9NySfBVkv1HDbly59FbBb3QtlImd0tFZxpVA=s0-d-e1-ft#https://static.xx.fbcdn.net/rsrc.php/v3/yP/r/ARZq-vP6uSX.png" height="30" width="77" alt="" class="CToWUd"><br>
</div>
<div style="height:10px">
</div>
<div style="color:#abadae;font-size:11px;margin:0 auto 5px auto">© Instagram. Facebook Inc., 1601 Willow Road, Menlo Park, CA 94025
<br>
</div>
<div style="color:#abadae;font-size:11px;margin:0 auto 5px auto">This message was sent to you <a style="color:#abadae;text-decoration:underline">""" + vict_email + """</a> and intended for """ + username + """. Not your account? <a href="https://instagram.com/accounts/remove/report_wrong_email/2s61r5s/5ek-693483ceb4d80b065e6015805237eeb6/w5nY3j90/bHVjYWdhcm9mYWxvMDJAZ21haWwuY29t/" style="color:#abadae;text-decoration:underline" target="_blank" data-saferedirecturl="https://www.google.com/url?q=https://instagram.com/accounts/remove/report_wrong_email/2s61r5s/5ek-693483ceb4d80b065e6015805237eeb6/w5nY3j90/bHVjYWdhcm9mYWxvMDJAZ21haWwuY29t/&source=gmail&ust=1612870216348000&usg=AFQjCNGZI-bOmXLy7H1qDa0SbyZStfxslg">Remove your email</a> from this account.
<br>
</div>
</td>
<td width="20" style="display:block;width:20px">
</td></tr></tbody></table></td></tr>
<tr>
<td height="20" style="line-height:20px" colspan="3">
</td></tr>
</tbody></table>
<span><img src="https://ci6.googleusercontent.com/proxy/dgydYneR-yCzdfy07nuLAaMs8WnN0VZ1eV-D9sNxk0-0wcuXlqohYxappUKrQ09YKBQTrsU8pgd0y5uogt3Ek8--FchRo1YqHtDhMDHOmayLCwlSakm2xj9aIE0dRyTkvny8_b66Ff5O1oCjiCkfbf9P7BNU=s0-d-e1-ft#https://www.facebook.com/email_open_log_pic.php?mid=5a03a2edede13G24bc2e43fd55c0G5a03a7874e0e7G248" style="border:0;width:1px;height:1px" class="CToWUd"></span></td></tr></tbody></table></div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/template.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "Instagram:"
message['To'] = vict_email
message['Subject'] = "Problem with your Instagram account"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/template.html")
@staticmethod
def instagram_secured(email, password, vict_email, username):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "TOKEN FOUND")
print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE + "GENERATING SECURETED TEMPLATE....")
sleep(2)
f = open("Phishing/Template/confirmed.html", "w+")
f.write("""
<html>
<body>
<div style="margin:0;padding:0" dir="ltr" bgcolor="#ffffff">
<table border="0" cellspacing="0" cellpadding="0" align="center" id="m_-6771075845464280513email_table" style="border-collapse:collapse">
<tbody>
<tr>
<td id="m_-6771075845464280513email_content" style="font-family:Helvetica Neue,Helvetica,Lucida Grande,tahoma,verdana,arial,sans-serif;background:#ffffff">
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td height="20" style="line-height:20px" colspan="3"> </td>
</tr>
<tr>
<td height="1" colspan="3" style="line-height:1px"></td>
</tr>
<tr>
<td>
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse;border:solid 1px white;margin:0 auto 5px auto;padding:3px 0;text-align:center;width:430px">
<tbody>
<tr>
<td width="15px" style="width:15px">
</td>
<td style="line-height:0px;width:400px;padding:0 0 15px 0">
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td style="width:100%;text-align:left;height:33px">
<img height="33" src="https://ci4.googleusercontent.com/proxy/H-WMBt20rSRWwIK0YLwC8Uyi1mnXWEEEiUT0twBA78N4_Rbri9VuqAL_Azd6xVjLkSiTQ6r1RjyDJ2Hx1O_3UqLo4H_LxG1o8LHL4yDfZw=s0-d-e1-ft#https://static.xx.fbcdn.net/rsrc.php/v3/yb/r/QTa-gpOyYBa.png" style="border:0" class="CToWUd">
</td>
</tr>
</tbody>
</table>
</td>
<td width="15px" style="width:15px">
</td>
</tr>
<tr>
<td width="15px" style="width:15px">
</td>
<td style="border-top:solid 1px #dbdbdb">
</td>
<td width="15px" style="width:15px">
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td>
<table border="0" width="430" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto">
<tbody>
<tr>
<td>
<table border="0" width="430px" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto;width:430px">
<tbody>
<tr>
<td width="15" style="display:block;width:15px"> </td>
</tr>
<tr>
<td>
<table border="0" width="100%" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td>
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td width="20" style="display:block;width:20px"> </td>
<td>
<table border="0" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
<tr>
<td>
</td>
</tr>
<tr>
<td>
<p style="margin:10px 0 10px 0;color:#565a5c;font-size:18px;text-align: center;">Thank you for have secured your account.</p>
</td></tr>
<tr></tr>
<tr>
<td height="10" style="line-height:10px" colspan="1"> </td></tr>
<tr>
<td>
<table border="0" width="390" cellspacing="0" cellpadding="0" style="border-collapse:collapse">
<tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td></tr><tr><td>
<table border="0" width="430px" cellspacing="0" cellpadding="0" style="border-collapse:collapse;margin:0 auto 0 auto;width:430px"><tbody><tr><td height="5" style="line-height:5px" colspan="3"> </td></tr><tr><td width="20" style="display:block;width:20px"> </td><td><div style="padding-top:10px"><img src="https://ci4.googleusercontent.com/proxy/1jVmGWy9tCnCqBWLSinJ6Z8m-mANhlu-0HJJpn3x1Rf1YzMg3CCnm8YzpKQh29yaES9XHM9NySfBVkv1HDbly59FbBb3QtlImd0tFZxpVA=s0-d-e1-ft#https://static.xx.fbcdn.net/rsrc.php/v3/yP/r/ARZq-vP6uSX.png" height="30" width="77" alt="" class="CToWUd"><br>
</div>
<div style="height:10px">
</div>
<div style="color:#abadae;font-size:11px;margin:0 auto 5px auto">© Instagram. Facebook Inc., 1601 Willow Road, Menlo Park, CA 94025
<br>
</div>
<div style="color:#abadae;font-size:11px;margin:0 auto 5px auto">This message was sent to """ + vict_email + """ <a style="color:#abadae;text-decoration:underline"><!--HERE COMES THE VICTIM EMAIL--></a> and intended for """ + username + """. Not your account? <a href="https://instagram.com/accounts/remove/report_wrong_email/2s61r5s/5ek-693483ceb4d80b065e6015805237eeb6/w5nY3j90/bHVjYWdhcm9mYWxvMDJAZ21haWwuY29t/" style="color:#abadae;text-decoration:underline" target="_blank" data-saferedirecturl="https://www.google.com/url?q=https://instagram.com/accounts/remove/report_wrong_email/2s61r5s/5ek-693483ceb4d80b065e6015805237eeb6/w5nY3j90/bHVjYWdhcm9mYWxvMDJAZ21haWwuY29t/&source=gmail&ust=1612870216348000&usg=AFQjCNGZI-bOmXLy7H1qDa0SbyZStfxslg">Remove your email</a> from this account.
<br>
</div>
</td>
<td width="20" style="display:block;width:20px">
</td></tr></tbody></table></td></tr>
<tr>
<td height="20" style="line-height:20px" colspan="3">
</td></tr>
</tbody></table>
<span><img src="https://ci6.googleusercontent.com/proxy/dgydYneR-yCzdfy07nuLAaMs8WnN0VZ1eV-D9sNxk0-0wcuXlqohYxappUKrQ09YKBQTrsU8pgd0y5uogt3Ek8--FchRo1YqHtDhMDHOmayLCwlSakm2xj9aIE0dRyTkvny8_b66Ff5O1oCjiCkfbf9P7BNU=s0-d-e1-ft#https://www.facebook.com/email_open_log_pic.php?mid=5a03a2edede13G24bc2e43fd55c0G5a03a7874e0e7G248" style="border:0;width:1px;height:1px" class="CToWUd"></span></td></tr></tbody></table></div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/confirmed.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "Instagram:"
message['To'] = vict_email
message['Subject'] = "Instagram Account secured"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/confirmed.html")
@staticmethod
def google_template(email, password, vict_email, links):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "GENERATING TEMPLATE....")
f = open("Phishing/Template/template.html", "w")
f.write("""
<html>
<body>
<div style="border-style:solid;border-width:thin;border-color:#dadce0;border-radius:8px;padding:40px 20px" align="center">
<img src="https://ci5.googleusercontent.com/proxy/T_zJ7UbaC9x27OP4-ZCPfDipqYLSGum30AlaxEycVclfvxO8Cze0sZ0kCrXlx6a-MgvW2tswbIyiNVfczjDuGh9okorzC5SUJDfwkHr6-3j1KUu94HuAw5uxM_jaElQef3Sub84=s0-d-e1-ft#https://www.gstatic.com/images/branding/googlelogo/2x/googlelogo_color_74x24dp.png" aria-hidden="true" style="margin-bottom:16px" alt="Google" class="CToWUd" width="74" height="24">
<div style="font-family:'Google Sans',Roboto,RobotoDraft,Helvetica,Arial,sans-serif;border-bottom:thin solid #dadce0;color:rgba(0,0,0,0.87);line-height:32px;padding-bottom:24px;text-align:center;word-break:break-word">
<div style="font-size:24px">We have detected a problem with your account
</div>
<table style="margin-top:8px" align="center">
<tbody>
<tr style="line-height:normal">
<td style="padding-right:8px" align="right">
</td>
<td>
</td></tr>
<img align ="center"style="width:40px;height:40px;border-radius:50%" src="https://cdn1.iconfinder.com/data/icons/color-bold-style/21/08-512.png" alt="" class="CToWUd" width="40" height="40">
</tbody></table>
</div>
<div style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;font-size:14px;color:rgba(0,0,0,0.87);line-height:20px;padding-top:20px;text-align:center">Hi <a style="font-family:'Google Sans',Roboto,RobotoDraft,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.87);font-size:14px;line-height:20px;margin-top:10px;">""" + vict_email + """</a> looks like your account has been compromised please verify your credentials
<div style="padding-top:32px;text-align:center">
<a href=""" + '' + links + ''""" style="font-family:'Google Sans',Roboto,RobotoDraft,Helvetica,Arial,sans-serif;line-height:16px;color:#ffffff;font-weight:400;text-decoration:none;font-size:14px;display:inline-block;padding:10px 24px;background-color:#4184f3;border-radius:5px;min-width:90px" target="_blank" data-saferedirecturl="">Check the activity</a></div></div>
<div style="padding-top:20px;font-size:12px;line-height:16px;color:#5f6368;letter-spacing:0.3px;text-align:center">you can also check your activity here:
<br>
<a style="color:rgba(0,0,0,0.87);text-decoration:inherit">https://myaccount.google.com/n<wbr>otifications</a></div></div>
<div style="text-align:center">
<div style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.54);font-size:11px;line-height:18px;padding-top:12px;text-align:center">
<div style="direction:ltr">© 2021 Google Ireland Ltd., <a class="m_-4473426257341250174afal" style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.54);font-size:11px;line-height:18px;padding-top:12px;text-align:center">Gordon House, Barrow Street, Dublin 4, Ireland</a></div></div></div></td><td width="8" style="width:8px"></td></tr></tbody></table></td></tr><tr height="32" style="height:32px"><td></td></tr></tbody></table></div></div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/template.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "Google:"
message['To'] = vict_email
message['Subject'] = "Problem with your Google account"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/template.html")
@staticmethod
def google_secured(email, password, vict_email):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "TOKEN FOUND")
print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE + "GENERATING SECURETED TEMPLATE....")
sleep(2)
f = open("Phishing/Template/confirmed.html", "w+")
f.write("""
<html>
<body>
<div style="border-style:solid;border-width:thin;border-color:#dadce0;border-radius:8px;padding:40px 20px" align="center">
<img src="https://ci5.googleusercontent.com/proxy/T_zJ7UbaC9x27OP4-ZCPfDipqYLSGum30AlaxEycVclfvxO8Cze0sZ0kCrXlx6a-MgvW2tswbIyiNVfczjDuGh9okorzC5SUJDfwkHr6-3j1KUu94HuAw5uxM_jaElQef3Sub84=s0-d-e1-ft#https://www.gstatic.com/images/branding/googlelogo/2x/googlelogo_color_74x24dp.png" aria-hidden="true" style="margin-bottom:16px" alt="Google" class="CToWUd" width="74" height="24">
<div style="font-family:'Google Sans',Roboto,RobotoDraft,Helvetica,Arial,sans-serif;border-bottom:thin solid #dadce0;color:rgba(0,0,0,0.87);line-height:32px;padding-bottom:24px;text-align:center;word-break:break-word">
<div style="font-size:24px">Account secured
</div>
<table style="margin-top:8px" align="center">
<tbody>
<tr style="line-height:normal">
<td style="padding-right:8px" align="right">
<a style="font-family:'Google Sans',Roboto,RobotoDraft,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.87);font-size:14px;line-height:20px;margin-top:10px;">""" + vict_email + """</a>
</td>
<td>
</td></tr>
</tbody></table>
</div>
<div style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;font-size:14px;color:rgba(0,0,0,0.87);line-height:20px;padding-top:20px;text-align:center">Thank you for have secured your account
<div style="padding-top:32px;text-align:center">
<div style="padding-top:20px;font-size:12px;line-height:16px;color:#5f6368;letter-spacing:0.3px;text-align:center">you can also check your activity here:
<br>
<a style="color:rgba(0,0,0,0.87);text-decoration:inherit">https://myaccount.google.com/n<wbr>otifications</a></div></div>
<div style="text-align:center">
<div style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.54);font-size:11px;line-height:18px;padding-top:12px;text-align:center">
<div style="direction:ltr">© 2021 Google Ireland Ltd., <a class="m_-4473426257341250174afal" style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;color:rgba(0,0,0,0.54);font-size:11px;line-height:18px;padding-top:12px;text-align:center">Gordon House, Barrow Street, Dublin 4, Ireland</a></div></div></div></td><td width="8" style="width:8px"></td></tr></tbody></table></td></tr><tr height="32" style="height:32px"><td></td></tr></tbody></table></div></div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/confirmed.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "Google:"
message['To'] = vict_email
message['Subject'] = "Google Account secured"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/confirmed.html")
@staticmethod
def github_template(email, password, vict_email, username, links):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "GENERATING TEMPLATE....")
f = open("Phishing/Template/template.html", "w")
f.write("""
<html>
<body>
<div id=":29g" class="a3s aiL ">Hey """ + username + """<br>
<br>
Someone has tried to access on your account your data may be at risk
<br>
<br>
Log in to confirm that is you and check the activity <a href= """ + "" + links + "" """ target="_blank" >Check the activity</a><br>
<br>
If you run into problems, please contact support by visiting <a href="" style="color:#0000ff" rel="noreferrer" target="_blank" data-saferedirecturl="https://www.google.com/url?q=https://github.com/contact&source=gmail&ust=1619524905264000&usg=AFQjCNH_U_nM6w3FRIK6U8wqjC8y_Rmk7g">https://<span class="il">github</span>.com/contact</a><br>
<br>
Thanks,<br>
The <span class="il">GitHub</span> Team<div class="yj6qo"></div><div class="adL"><br>
</div></div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/template.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "GitHub:"
message['To'] = vict_email
message['Subject'] = "Problem with your GitHub account"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/template.html")
@staticmethod
def github_secured(email, password, vict_email, username):
print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + "TOKEN FOUND")
print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE + "GENERATING SECURETED TEMPLATE....")
sleep(2)
f = open("Phishing/Template/confirmed.html", "w+")
f.write("""
<html>
<body>
<div id=":29g" class="a3s aiL ">Hey """ + username + """<br>
<br>
Thank you for have secured your account
<br>
</div>
</body>
</html>
""")
f.close()
html = open("Phishing/Template/confirmed.html")
message = MIMEMultipart()
message = MIMEText(html.read(), "html")
message['From'] = "GitHub:"
message['To'] = vict_email
message['Subject'] = "GitHub Account secured"
# SERVER DECLARATION #
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(email, password)
text = message.as_string()
try:
server.sendmail(email, vict_email, text)
print(Font.Color.WHITE + "\n[+]" + Font.Color.YELLOW + "EMAIL SENT" + Font.Color.WHITE + "[+]")
except Exception as e:
print(e)
os.remove("Phishing/Template/confirmed.html")
@staticmethod
def Coming_soon():
print(Font.Color.RED + "\n[!]" + Font.Color.WHITE + "SORRY BUT THE EMAIL TEMPLATE IS NOT AVAIABLE YET")
| 57.828063
| 825
| 0.570555
| 3,361
| 29,261
| 4.942874
| 0.11812
| 0.01228
| 0.009029
| 0.03738
| 0.89743
| 0.887618
| 0.884428
| 0.876422
| 0.870222
| 0.867513
| 0
| 0.061221
| 0.268719
| 29,261
| 505
| 826
| 57.942574
| 0.714973
| 0.004033
| 0
| 0.907598
| 0
| 0.160164
| 0.813588
| 0.268358
| 0.00616
| 0
| 0
| 0
| 0
| 1
| 0.014374
| false
| 0.024641
| 0.01232
| 0
| 0.028747
| 0.045175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b222f169502dae47fd24379c48a6f7e1bc9a994b
| 93,666
|
py
|
Python
|
src/main.py
|
LinWeizheDragon/AutoFidgetDetection
|
8a0d0fcc8938c2c9e97655e999e226c61f414cfe
|
[
"MIT"
] | 3
|
2021-02-04T03:36:04.000Z
|
2021-11-29T13:59:25.000Z
|
src/main.py
|
LinWeizheDragon/AutoFidgetDetection
|
8a0d0fcc8938c2c9e97655e999e226c61f414cfe
|
[
"MIT"
] | 1
|
2021-09-15T23:17:40.000Z
|
2021-09-16T01:24:40.000Z
|
src/main.py
|
LinWeizheDragon/AutoFidgetDetection
|
8a0d0fcc8938c2c9e97655e999e226c61f414cfe
|
[
"MIT"
] | null | null | null |
from utility.base_config import *
from pprint import pprint
import os
import json
import cv2
import math
import random
import numpy as np
import pandas as pd
import pickle
from scipy.signal import savgol_filter
from scipy import stats
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.model_selection import KFold
from utility.colors import *
from model.fidgeting_dnn import Fidgeting_DNN
import matplotlib.pyplot as plt
from utility.elan_portal import ElanPortal
from utility.decompose_string import decompose_string, decompose_string_hand
from utility.dirs import create_dirs
from component.basic_processor import BasicProcessor
from component.video_processor import VideoProcessor
from component.optical_flow_analyser import OpticalFlowAnalyser
from component.hand_cross_analyser import HandCrossAnalyser
from component.hand_location_analyser import HandLocationAnalyser
from component.leg_action_analyser import LegActionAnalyser
from component.leg_location_analyser import LegLocationAnalyser
from component.label_machine import LabelMachine
class MainPipeline(BasicProcessor):
def __init__(self, name, path_data, batch_data=None):
    """Initialise the pipeline for a single video or for batch processing.

    :param name: identifier for this pipeline instance
    :param path_data: per-video path dictionary (video / openpose_data /
        openface_data / file_format / participant_id / session_id), or
        None to run in batch mode
    :param batch_data: batch description, used only when path_data is None
    """
    self.name = name
    if path_data is None:
        # general (batch) processing mode
        self.batch_data = batch_data
        return
    # single-video processing mode
    self.path_data = path_data
    self.video_path = path_data['video']
    self.openpose_output_path = path_data['openpose_data']
    self.openface_output_file = path_data['openface_data']
    file_format = path_data['file_format']
    self.processed_file = os.path.join(
        DATA_FOLDER, 'processed_data', file_format + '.npy')
    self.processed_smooth_file = os.path.join(
        DATA_FOLDER, 'processed_data_smooth', file_format + '.npy')
    self.participant_id = path_data['participant_id']
    self.session_id = path_data['session_id']
def read_labels(self, label_file):
    """Read an ELAN annotation export and convert it to per-frame binary labels.

    The video's fps is used (by ElanPortal.read) to map the second-based ELAN
    segments to frame indices; each (tier, value) pair is then expanded into a
    0/1 array with one entry per frame via self.transfer_to_array.

    :param label_file: path to the ELAN label export for this video
    :return: dict mapping label name -> per-frame binary array
    """
    # fps is required to convert ELAN's time-based segments into frames.
    cap = cv2.VideoCapture(self.video_path)
    fps = cap.get(cv2.CAP_PROP_FPS)
    print('fps:', fps)
    portal = ElanPortal()
    portal.read(label_file, fps)
    print(portal.get_segments('Leg Action', 'static'))
    # hand location
    left_hand_on_hand = self.transfer_to_array(portal.get_segments('Left Hand Location', 'on hand'))
    # NOTE(review): the right hand's 'on hand' is read from the *Left* tier —
    # presumably 'on hand' (hands crossed) is annotated once on the left tier
    # and shared by both hands (see the left->right copy below); confirm.
    right_hand_on_hand = self.transfer_to_array(portal.get_segments('Left Hand Location', 'on hand'))
    left_hand_on_leg = self.transfer_to_array(portal.get_segments('Left Hand Location', 'on leg'))
    right_hand_on_leg = self.transfer_to_array(portal.get_segments('Right Hand Location', 'on leg'))
    left_hand_on_arm = self.transfer_to_array(portal.get_segments('Left Hand Location', 'on arm'))
    right_hand_on_arm = self.transfer_to_array(portal.get_segments('Right Hand Location', 'on arm'))
    left_hand_on_face = self.transfer_to_array(portal.get_segments('Left Hand Location', 'on face'))
    right_hand_on_face = self.transfer_to_array(portal.get_segments('Right Hand Location', 'on face'))
    # leg location
    leg_on_leg = self.transfer_to_array(portal.get_segments('Leg Location', 'on leg'))
    # NOTE(review): 'on leg' is queried again here instead of 'on ground' —
    # looks like a copy-paste slip if an 'on ground' value exists in the
    # annotation scheme; verify against the label files.
    leg_on_ground = self.transfer_to_array(portal.get_segments('Leg Location', 'on leg'))
    # action
    left_hand_static = self.transfer_to_array(portal.get_segments('Left Hand Action', 'static'))
    left_hand_rhythmic = self.transfer_to_array(portal.get_segments('Left Hand Action', 'rhythmic'))
    right_hand_static = self.transfer_to_array(portal.get_segments('Right Hand Action', 'static'))
    right_hand_rhythmic = self.transfer_to_array(portal.get_segments('Right Hand Action', 'rhythmic'))
    leg_static = self.transfer_to_array(portal.get_segments('Leg Action', 'static'))
    leg_rhythmic = self.transfer_to_array(portal.get_segments('Leg Action', 'rhythmic'))
    # copy left to right: while the hands are crossed ('on hand'), the left
    # hand's static/rhythmic labels are mirrored onto the right hand.
    right_hand_static[(left_hand_on_hand == 1) & (left_hand_static == 1)] = 1
    right_hand_rhythmic[(left_hand_on_hand == 1) & (left_hand_rhythmic == 1)] = 1
    result = {
        'left_hand_on_hand': left_hand_on_hand,
        'right_hand_on_hand': right_hand_on_hand,
        'left_hand_on_leg': left_hand_on_leg,
        'right_hand_on_leg': right_hand_on_leg,
        'left_hand_on_arm': left_hand_on_arm,
        'right_hand_on_arm': right_hand_on_arm,
        'left_hand_on_face': left_hand_on_face,
        'right_hand_on_face': right_hand_on_face,
        'leg_on_leg': leg_on_leg,
        'leg_on_ground': leg_on_ground,
        'left_hand_static': left_hand_static,
        'left_hand_rhythmic': left_hand_rhythmic,
        'right_hand_static': right_hand_static,
        'right_hand_rhythmic': right_hand_rhythmic,
        'leg_static': leg_static,
        'leg_rhythmic': leg_rhythmic,
    }
    return result
def export_elan_portal(self):
    """Run all automatic analysers on this video and export the detected
    segments as an ELAN-importable label file.

    Output goes to DATA_FOLDER/label/generated/<participant>_<session>.txt.
    Segment boundaries produced by the analysers are frame indices and are
    converted to seconds (via the video fps) before export.
    """
    cap = cv2.VideoCapture(self.video_path)
    fps = cap.get(cv2.CAP_PROP_FPS)
    print('fps:', fps)
    portal = ElanPortal()
    # --- leg location ---
    print('computing leg intersection...')
    instance_leg = LegLocationAnalyser(self.name, self.path_data)
    leg_continuous_segments, intersect_data = instance_leg.compute_leg_intersection(cutoff=0, min_length=20)
    # --- hand location (left then right) ---
    print('computing left hand location...')
    instance_left_hand = HandLocationAnalyser(self.name, self.path_data, hand='left')
    left_hand_arm_continuous_segments, left_hand_leg_continuous_segments, left_hand_face_continuous_segments = \
        instance_left_hand.compute_hand_intersection(cutoff=0, min_length=80)
    print('computing right hand location...')
    instance_right_hand = HandLocationAnalyser(self.name, self.path_data, hand='right')
    right_hand_arm_continuous_segments, right_hand_leg_continuous_segments, right_hand_face_continuous_segments = \
        instance_right_hand.compute_hand_intersection(cutoff=0, min_length=80)
    # --- hand-cross and hand-action events ---
    print('computing hand cross')
    instance_hand_cross = HandCrossAnalyser(self.name, self.path_data)
    continuous_segments, valid_intersect_data = instance_hand_cross.compute_stationary_rectangles(cutoff=0,
                                                                                                  min_length=20)
    print('computing left hand static')
    left_static_segments = instance_hand_cross.compute_static_hands_without_crossing('left')
    print('computing right hand static')
    right_static_segments = instance_hand_cross.compute_static_hands_without_crossing('right')
    static_segments, dynamic_segments, rhythmic_segments, dynamic_rythmic_segments = instance_hand_cross.compute_static_and_rhythmic_with_hand_cross()
    # --- leg action events ---
    print('computing leg action')
    instance_leg_action = LegActionAnalyser(self.name, self.path_data)
    foot_static_segments, foot_dynamic_segments, foot_rhythmic_segments, foot_dynamic_rythmic_segments = instance_leg_action.compute_static_and_rhythmic_feet()

    def transfer_to_secs(segments):
        # Analyser output is in frames; ELAN expects seconds.
        return (np.array(segments) / fps).tolist()

    continuous_segments = transfer_to_secs(continuous_segments)
    left_static_segments = transfer_to_secs(left_static_segments)
    right_static_segments = transfer_to_secs(right_static_segments)
    static_segments = transfer_to_secs(static_segments)
    dynamic_segments = transfer_to_secs(dynamic_segments)
    rhythmic_segments = transfer_to_secs(rhythmic_segments)
    dynamic_rythmic_segments = transfer_to_secs(dynamic_rythmic_segments)
    foot_static_segments = transfer_to_secs(foot_static_segments)
    foot_dynamic_rythmic_segments = transfer_to_secs(foot_dynamic_rythmic_segments)
    foot_rhythmic_segments = transfer_to_secs(foot_rhythmic_segments)
    left_hand_arm_continuous_segments = transfer_to_secs(left_hand_arm_continuous_segments)
    left_hand_leg_continuous_segments = transfer_to_secs(left_hand_leg_continuous_segments)
    left_hand_face_continuous_segments = transfer_to_secs(left_hand_face_continuous_segments)
    right_hand_arm_continuous_segments = transfer_to_secs(right_hand_arm_continuous_segments)
    right_hand_leg_continuous_segments = transfer_to_secs(right_hand_leg_continuous_segments)
    right_hand_face_continuous_segments = transfer_to_secs(right_hand_face_continuous_segments)
    leg_continuous_segments = transfer_to_secs(leg_continuous_segments)
    # No hand cross event
    portal.add_tier('Left Hand Action', 'static', left_static_segments)
    portal.add_tier('Left Hand Location', 'on arm', left_hand_arm_continuous_segments)
    portal.add_tier('Left Hand Location', 'on leg', left_hand_leg_continuous_segments)
    portal.add_tier('Left Hand Location', 'on face', left_hand_face_continuous_segments)
    portal.add_tier('Right Hand Action', 'static', right_static_segments)
    portal.add_tier('Right Hand Location', 'on arm', right_hand_arm_continuous_segments)
    portal.add_tier('Right Hand Location', 'on leg', right_hand_leg_continuous_segments)
    portal.add_tier('Right Hand Location', 'on face', right_hand_face_continuous_segments)
    # Hand cross event — NOTE(review): cross events are only written to the
    # *Left* tiers; presumably the convention is that crossed-hand events
    # live on the left tier (cf. read_labels' left->right copy). Confirm.
    portal.add_tier('Left Hand Location', 'on hand', continuous_segments)
    portal.add_tier('Left Hand Action', 'static', static_segments)
    portal.add_tier('Left Hand Action', 'rhythmic', rhythmic_segments)
    # Leg Event
    portal.add_tier('Leg Action', 'static', foot_static_segments)
    # NOTE(review): 'rhythmic' is added twice on the Leg Action tier (the
    # dynamic-rhythmic and the rhythmic feet segments) — confirm both sets
    # are meant to land on the same tier value.
    portal.add_tier('Leg Action', 'rhythmic', foot_dynamic_rythmic_segments)
    portal.add_tier('Leg Action', 'rhythmic', foot_rhythmic_segments)
    portal.add_tier('Leg Location', 'on leg', leg_continuous_segments)
    portal.export(os.path.join(DATA_FOLDER, 'label', 'generated', '{}_{}.txt'.format(
        self.participant_id, self.session_id
    )))
def generate_hand_cross_slice(self):
    '''
    Generate raw optical-flow slices for the hand-cross segments of this
    video, saving one .npy per sliding window to
    DATA_FOLDER/hand_cross_analysis_optical_flow.

    Windows of window_size frames are taken every window_step frames inside
    each detected hand-cross segment; a trailing window shorter than
    min_size is back-shifted to full length or skipped entirely.
    :return:
    '''
    instance_hand_cross_analyser = HandCrossAnalyser(self.name, self.path_data)
    continuous_segments, valid_intersect_data = instance_hand_cross_analyser.compute_stationary_rectangles(
        cutoff=0, min_length=20)
    print(continuous_segments)
    # best_rects = {}
    # for segment in continuous_segments:
    #     # find largest rectangle for each segment
    #     rects = [valid_intersect_data[i] for i in valid_intersect_data.keys()
    #              if i >= segment[0] and i < segment[1]]
    #     rects = np.array(rects)
    #     best_rect = np.hstack((np.min(rects, axis=0)[:2], np.max(rects, axis=0)[2:]))
    #     for i in range(segment[0], segment[1]):
    #         best_rects[i] = best_rect
    # Sliding-window parameters (frames).
    window_size = 100
    window_step = 50
    min_size = 100
    cap = cv2.VideoCapture(self.video_path)
    of_analyser = OpticalFlowAnalyser('test', self.path_data)
    # Tracking seeds: columns 194:232 and 236:274 of the analyser data —
    # presumably the left- and right-hand keypoint coordinates; confirm
    # against the analyser's column layout.
    init_points = np.hstack(
        (of_analyser.data[:, 194:232],
         of_analyser.data[:, 236:274])
    )
    participant_id = self.participant_id
    session_id = self.session_id
    for segment in continuous_segments:
        starting = int(segment[0])
        ending = int(segment[1])
        max_length = ending - starting
        print('-------->', starting, ending)
        # "+ 2" lets the loop also attempt a final, possibly partial window.
        for i in range(math.floor((max_length - window_size) / window_step) + 2):
            sub_starting, sub_ending = i * window_step, i * window_step + window_size
            sub_starting += starting
            sub_ending += starting
            if sub_ending > ending:
                # Clamp the last window to the segment end...
                sub_ending = ending
                if sub_ending - sub_starting < min_size:
                    # ...and back-shift it to a full window if it got too short.
                    sub_starting = sub_ending - window_size
                    if sub_starting < starting:
                        # can't take at least one window
                        continue
            assert sub_ending - sub_starting <= window_size, 'sub slice must == to window size!'
            print('start slicing:', sub_starting, sub_ending)
            new_file_name = 'participant_video_{}_{}_<{}_{}>.npy'.format(
                participant_id, session_id, sub_starting, sub_ending
            )
            # run optical flow instance
            optical_flow_data = of_analyser.run_optical_flow(cap, starting_time=sub_starting,
                                                             ending_time=sub_ending, init_points=init_points,
                                                             visualise=False)
            result = []
            # NOTE: this inner loop reuses the name `i`, shadowing the window
            # index above — harmless in Python (the outer for re-draws from
            # its own iterator) but easy to misread.
            for i in range(sub_starting, sub_ending):
                # print(optical_flow_data[i].reshape((1, -1)).shape)
                # print(i)
                if i not in optical_flow_data:
                    print('Error detected, closing segment.')
                    break
                result.append(optical_flow_data[i].reshape((1, -1)))
            result_path = os.path.join(DATA_FOLDER,
                                       'hand_cross_analysis_optical_flow',
                                       new_file_name
                                       )
            # 38 tracked points x 2 coordinates per frame.
            result_array = np.zeros((len(result), 38 * 2))
            for index, frame_data in enumerate(result):
                result_array[index, :frame_data.shape[1]] = frame_data
                # When a frame yields fewer columns than 76, the missing tail
                # is carried over from the previous row (index - 1) —
                # presumably points that dropped out of tracking; confirm.
                result_array[index, frame_data.shape[1]:] = result_array[index - 1, frame_data.shape[1]:]
            # print(result_array)
            result = result_array
            # result = result.reshape((result.shape[0], result.shape[2]))
            create_dirs([os.path.split(result_path)[0]])
            np.save(result_path, result)
            # Debug summary of the saved slice's frequency/variance features.
            FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(result))
            print(np.mean(FFT, axis=0))
            print(np.mean(STD))
            print(np.mean(MEAN))
    cap.release()
    cv2.destroyAllWindows()
    print('saving completed.')
def generate_leg_slice(self):
    '''
    Generate raw optical-flow slices over the whole video for the leg
    keypoints, saving one .npy per sliding window to
    DATA_FOLDER/leg_action_analysis_optical_flow.

    Unlike the hand variants, the whole video is treated as one segment.
    :return:
    '''
    # Sliding-window parameters (frames).
    window_size = 100
    window_step = 50
    min_size = 100
    cap = cv2.VideoCapture(self.video_path)
    of_analyser = OpticalFlowAnalyser('test', self.path_data)
    # Tracking seeds: columns 44:50 and 38:44 — presumably the left/right
    # foot keypoints; confirm against the analyser's column layout.
    init_points = np.hstack(
        (of_analyser.data[:, 44:50],
         # of_analyser.data[:, 20:22],
         of_analyser.data[:, 38:44],
         # of_analyser.data[:, 26:28],
         )
    )
    participant_id = self.participant_id
    session_id = self.session_id
    # Single segment spanning every frame of the analyser data.
    starting = 0
    ending = of_analyser.data.shape[0]
    max_length = ending - starting
    print('-------->', starting, ending)
    # "+ 2" lets the loop also attempt a final, possibly partial window.
    for i in range(math.floor((max_length - window_size) / window_step) + 2):
        sub_starting, sub_ending = i * window_step, i * window_step + window_size
        sub_starting += starting
        sub_ending += starting
        if sub_ending > ending:
            # Clamp the last window to the video end...
            sub_ending = ending
            if sub_ending - sub_starting < min_size:
                # ...and back-shift it to a full window if it got too short.
                sub_starting = sub_ending - window_size
                if sub_starting < starting:
                    # can't take at least one window
                    continue
        assert sub_ending - sub_starting <= window_size, 'sub slice must == to window size!'
        print('start slicing:', sub_starting, sub_ending)
        new_file_name = 'participant_video_{}_{}_<{}_{}>.npy'.format(
            participant_id, session_id, sub_starting, sub_ending
        )
        # run optical flow instance
        optical_flow_data = of_analyser.run_optical_flow(cap, starting_time=sub_starting, ending_time=sub_ending,
                                                         init_points=init_points,
                                                         visualise=False)
        result = []
        # NOTE: inner loop reuses `i`, shadowing the window index — harmless
        # in Python but easy to misread.
        for i in range(sub_starting, sub_ending):
            # print(optical_flow_data[i].reshape((1, -1)).shape)
            # print(i)
            if i not in optical_flow_data:
                print('Error detected, closing segment.')
                break
            result.append(optical_flow_data[i].reshape((1, -1)))
        result_path = os.path.join(DATA_FOLDER,
                                   'leg_action_analysis_optical_flow',
                                   new_file_name
                                   )
        # 6 tracked points x 2 coordinates per frame.
        result_array = np.zeros((len(result), 6 * 2))
        for index, frame_data in enumerate(result):
            result_array[index, :frame_data.shape[1]] = frame_data
            # Missing tail columns are carried over from the previous row —
            # presumably dropped tracking points; confirm.
            result_array[index, frame_data.shape[1]:] = result_array[index - 1, frame_data.shape[1]:]
        # print(result_array)
        result = result_array
        # result = result.reshape((result.shape[0], result.shape[2]))
        # Debug summary of the slice's frequency/variance features.
        FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(result))
        print(np.mean(FFT, axis=0))
        print(np.mean(STD))
        print(np.mean(MEAN))
        #
        # input()
        create_dirs([os.path.split(result_path)[0]])
        np.save(result_path, result)
    cap.release()
    cv2.destroyAllWindows()
    print('saving completed.')
def generate_hand_slice(self):
    '''
    Generate raw optical-flow slices for the *non-crossed* hand segments
    (frames where the hands are apart), once per hand, saving one .npy per
    sliding window to DATA_FOLDER/hand_action_analysis_optical_flow.
    :return:
    '''
    instance_hand_cross_analyser = HandCrossAnalyser(self.name, self.path_data)
    continuous_segments, hand_cross_intersect_data = instance_hand_cross_analyser.compute_stationary_rectangles(
        min_length=20, cutoff=0)
    print(continuous_segments)
    # Sliding-window parameters (frames).
    window_size = 100
    window_step = 50
    min_size = 100
    cap = cv2.VideoCapture(self.video_path)
    of_analyser = OpticalFlowAnalyser('test', self.path_data)
    # Build the complement of the hand-cross frames as contiguous
    # [start, end) segments: frames NOT present in the cross data.
    no_cross_continuous_segments = []
    no_cross_list = [i for i in range(of_analyser.data.shape[0]) if i not in hand_cross_intersect_data.keys()]
    for i in no_cross_list:
        if len(no_cross_continuous_segments) == 0:
            no_cross_continuous_segments.append([i, i + 1])
        else:
            if no_cross_continuous_segments[-1][1] == i:
                # Extends the current run.
                no_cross_continuous_segments[-1][1] += 1
            else:
                # Gap: start a new run.
                no_cross_continuous_segments.append([i, i + 1])
    continuous_segments = no_cross_continuous_segments
    print(continuous_segments)
    # init_points = np.hstack(
    #     (of_analyser.data[:, 194:232],
    #      of_analyser.data[:, 236:274])
    # )
    # NOTE: this assignment is redundant — init_points is re-assigned per
    # hand inside the loop below.
    init_points = of_analyser.data[:, 194:232]
    participant_id = self.participant_id
    session_id = self.session_id
    for hand in ['left', 'right']:
        # Columns 194:232 vs 236:274 — presumably left vs right hand
        # keypoints; confirm against the analyser's column layout.
        if hand == 'left':
            init_points = of_analyser.data[:, 194:232]
        else:
            init_points = of_analyser.data[:, 236:274]
        for segment in continuous_segments:
            starting = int(segment[0])
            ending = int(segment[1])
            max_length = ending - starting
            print('-------->', starting, ending)
            # "+ 2" lets the loop also attempt a final, possibly partial window.
            for i in range(math.floor((max_length - window_size) / window_step) + 2):
                sub_starting, sub_ending = i * window_step, i * window_step + window_size
                sub_starting += starting
                sub_ending += starting
                if sub_ending > ending:
                    # Clamp the last window to the segment end...
                    sub_ending = ending
                    if sub_ending - sub_starting < min_size:
                        # ...and back-shift to a full window if it got too short.
                        sub_starting = sub_ending - window_size
                        if sub_starting < starting:
                            # can't take at least one window
                            continue
                assert sub_ending - sub_starting <= window_size, 'sub slice must == to window size!'
                print('start slicing:', sub_starting, sub_ending)
                new_file_name = 'participant_video_{}_{}_<{}_{}>_{}.npy'.format(
                    participant_id, session_id, sub_starting, sub_ending, hand
                )
                # run optical flow instance
                optical_flow_data = of_analyser.run_optical_flow(cap, starting_time=sub_starting,
                                                                 ending_time=sub_ending, init_points=init_points,
                                                                 visualise=False)
                result = []
                # NOTE: inner loop reuses `i`, shadowing the window index —
                # harmless in Python but easy to misread.
                for i in range(sub_starting, sub_ending):
                    # print(optical_flow_data[i].reshape((1, -1)).shape)
                    # print(i)
                    if i not in optical_flow_data:
                        print('Error detected, closing segment.')
                        break
                    result.append(optical_flow_data[i].reshape((1, -1)))
                result_path = os.path.join(DATA_FOLDER,
                                           'hand_action_analysis_optical_flow',
                                           new_file_name
                                           )
                # 19 tracked points x 2 coordinates per frame (single hand).
                result_array = np.zeros((len(result), 19 * 2))
                for index, frame_data in enumerate(result):
                    result_array[index, :frame_data.shape[1]] = frame_data
                    # Missing tail columns are carried over from the previous
                    # row — presumably dropped tracking points; confirm.
                    result_array[index, frame_data.shape[1]:] = result_array[index - 1, frame_data.shape[1]:]
                # print(result_array)
                result = result_array
                # result = result.reshape((result.shape[0], result.shape[2]))
                create_dirs([os.path.split(result_path)[0]])
                np.save(result_path, result)
                # FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(result))
                # print(np.mean(FFT, axis=0))
                # print(np.mean(STD))
                # print(np.mean(MEAN))
                #
                # input()
    cap.release()
    cv2.destroyAllWindows()
    print('saving completed.')
def generate_hand_slice_from_label(self):
    '''
    generate hand cross slices from label TODO

    NOTE(review): the label-driven segment selection is still commented out
    below; as written this method duplicates generate_hand_slice except
    that results are saved to .../hand_action_analysis_optical_flow_label.
    :return:
    '''
    # label_data = main_pipeline.read_labels(main_pipeline.label_files[1])
    # left_hand_on_hand = label_data['left_hand_on_hand']
    # continuous_segments = self.transfer_to_segments(left_hand_on_hand, min_length=20, cutoff=0)
    # print(continuous_segments)
    instance_hand_cross_analyser = HandCrossAnalyser(self.name, self.path_data)
    continuous_segments, hand_cross_intersect_data = instance_hand_cross_analyser.compute_stationary_rectangles(min_length=20, cutoff=0)
    print(continuous_segments)
    # Sliding-window parameters (frames).
    window_size = 100
    window_step = 50
    min_size = 100
    cap = cv2.VideoCapture(self.video_path)
    of_analyser = OpticalFlowAnalyser('test', self.path_data)
    # Build the complement of the hand-cross frames as contiguous
    # [start, end) segments: frames NOT present in the cross data.
    no_cross_continuous_segments = []
    no_cross_list = [i for i in range(of_analyser.data.shape[0]) if i not in hand_cross_intersect_data.keys()]
    for i in no_cross_list:
        if len(no_cross_continuous_segments) == 0:
            no_cross_continuous_segments.append([i, i + 1])
        else:
            if no_cross_continuous_segments[-1][1] == i:
                # Extends the current run.
                no_cross_continuous_segments[-1][1] += 1
            else:
                # Gap: start a new run.
                no_cross_continuous_segments.append([i, i + 1])
    continuous_segments = no_cross_continuous_segments
    print(continuous_segments)
    # init_points = np.hstack(
    #     (of_analyser.data[:, 194:232],
    #      of_analyser.data[:, 236:274])
    # )
    participant_id = self.participant_id
    session_id = self.session_id
    for hand in ['left', 'right']:
        # Columns 194:232 vs 236:274 — presumably left vs right hand
        # keypoints; confirm against the analyser's column layout.
        if hand == 'left':
            init_points = of_analyser.data[:, 194:232]
        else:
            init_points = of_analyser.data[:, 236:274]
        for segment in continuous_segments:
            starting = int(segment[0])
            ending = int(segment[1])
            max_length = ending - starting
            print('-------->', starting, ending)
            # "+ 2" lets the loop also attempt a final, possibly partial window.
            for i in range(math.floor((max_length - window_size) / window_step) + 2):
                sub_starting, sub_ending = i * window_step, i * window_step + window_size
                sub_starting += starting
                sub_ending += starting
                if sub_ending > ending:
                    # Clamp the last window to the segment end...
                    sub_ending = ending
                    if sub_ending - sub_starting < min_size:
                        # ...and back-shift to a full window if it got too short.
                        sub_starting = sub_ending - window_size
                        if sub_starting < starting:
                            # can't take at least one window
                            continue
                assert sub_ending - sub_starting <= window_size, 'sub slice must == to window size!'
                print('start slicing:', sub_starting, sub_ending)
                new_file_name = 'participant_video_{}_{}_<{}_{}>_{}.npy'.format(
                    participant_id, session_id, sub_starting, sub_ending, hand
                )
                # run optical flow instance
                optical_flow_data = of_analyser.run_optical_flow(cap, starting_time=sub_starting,
                                                                 ending_time=sub_ending, init_points=init_points,
                                                                 visualise=False)
                result = []
                # NOTE: inner loop reuses `i`, shadowing the window index —
                # harmless in Python but easy to misread.
                for i in range(sub_starting, sub_ending):
                    # print(optical_flow_data[i].reshape((1, -1)).shape)
                    # print(i)
                    if i not in optical_flow_data:
                        print('Error detected, closing segment.')
                        break
                    result.append(optical_flow_data[i].reshape((1, -1)))
                result_path = os.path.join(DATA_FOLDER,
                                           'hand_action_analysis_optical_flow_label',
                                           new_file_name
                                           )
                # 19 tracked points x 2 coordinates per frame (single hand).
                result_array = np.zeros((len(result), 19 * 2))
                for index, frame_data in enumerate(result):
                    result_array[index, :frame_data.shape[1]] = frame_data
                    # Missing tail columns are carried over from the previous
                    # row — presumably dropped tracking points; confirm.
                    result_array[index, frame_data.shape[1]:] = result_array[index - 1, frame_data.shape[1]:]
                # print(result_array)
                result = result_array
                # result = result.reshape((result.shape[0], result.shape[2]))
                create_dirs([os.path.split(result_path)[0]])
                np.save(result_path, result)
                # FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(result))
                # print(np.mean(FFT, axis=0))
                # print(np.mean(STD))
                # print(np.mean(MEAN))
                #
                # input()
    cap.release()
    cv2.destroyAllWindows()
    print('saving completed.')
def hand_fidgeting_training_DNN(self):
from sklearn.model_selection import train_test_split
data = {}
for root, dirs, files in os.walk(os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow_label')):
for file in files:
if '.npy' in file:
data[file] = np.load(os.path.join(root, file))
X = []
y = []
label_data_collection = {}
for file_name in data.keys():
participant_id, session_id, starting, ending = decompose_string(file_name)
sub_data = data[file_name]
label_file_path = os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow_label',
file_name.replace('.npy', '.label1'))
if not os.path.exists(label_file_path):
continue
with open(label_file_path, 'r') as f:
label = f.read()
label = int(label)
if label == 2:
label = 1
if label == -1:
continue
FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(sub_data))
FFT = np.mean(FFT, axis=1)
STD = STD # np.mean(STD)
MEAN = MEAN # np.mean(MEAN, axis=0)
# ratio = np.count_nonzero(label_hand_cross_dynamic_rhythmic[starting:ending, :]) / (ending - starting)
# if ratio >= 0.8:
# y.append(1)
# else:
# y.append(0)
y.append(label)
single_x = np.hstack(
(FFT.reshape((1, -1)), STD.reshape((1, -1)), MEAN.reshape((1, -1)))
)
X.append(
single_x
)
print(file_name)
print(y)
X = np.array(X)
print(X.shape)
# divide partition
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
X_train, X_dev, y_train, y_dev = train_test_split(X_train, y_train, test_size=0.25)
def reshape_after_division(X):
return X.reshape((X.shape[0], X.shape[2]))
X_train = reshape_after_division(X_train)
X_dev = reshape_after_division(X_dev)
X_test = reshape_after_division(X_test)
dnn = Fidgeting_DNN(input_dim=[41, 76, 76], num_classes=2)
dnn.build_multi_class_model()
dnn.train_multi_class_model(X_train,
y_train,
X_dev,
y_dev, class_weight={0: 1, 1: 1.3})
dnn.evaluate_multi_class(X_train, y_train)
dnn.evaluate_multi_class(X_dev, y_dev)
dnn.evaluate_multi_class(X_test, y_test)
dnn.save_model(
os.path.join(DATA_FOLDER, 'pre-trained', 'hierarchical_DNN.h5')
)
# dnn = Fidgeting_DNN(input_dim=X_train.shape[1], num_classes=2)
# dnn.build_model()
# dnn.train_model(X_train, y_train, X_dev, y_dev, class_weight={0: 1, 1: 5})
# dnn.evaluate(X_train, y_train)
# dnn.evaluate(X_dev, y_dev)
# dnn.evaluate(X_test, y_test)
def hand_fidgeting_training_cross_validation(self):
from sklearn.model_selection import train_test_split
def reshape_after_division(X):
return X.reshape((X.shape[0], X.shape[2]))
data = {}
for root, dirs, files in os.walk(os.path.join(DATA_FOLDER, 'hand_cross_analysis_optical_flow_label')):
for file in files:
if '.npy' in file:
data[file] = np.load(os.path.join(root, file))
X = []
y = []
all_data = {}
label_data_collection = {}
for file_name in data.keys():
print(file_name)
participant_id, session_id, starting, ending = decompose_string(file_name)
all_data.setdefault(participant_id, {'data_list': [], 'label_list': []})
sub_data = data[file_name]
label_file_path = os.path.join(DATA_FOLDER, 'hand_cross_analysis_optical_flow_label',
file_name.replace('.npy', '.label1'))
if not os.path.exists(label_file_path):
continue
else:
with open(label_file_path, 'r') as f:
label1 = f.read()
label_file_path = os.path.join(DATA_FOLDER, 'hand_cross_analysis_optical_flow_label',
file_name.replace('.npy', '.label2'))
if not os.path.exists(label_file_path):
label2 = label1
else:
with open(label_file_path, 'r') as f:
label2 = f.read()
if label1 != label2:
print('drop due to disagreement')
continue
print(label1, label2)
label = int(label1)
if label == 2:
label = 1
if label == -1:
continue
FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(sub_data))
FFT = np.mean(FFT, axis=1)
STD = STD # np.mean(STD)
MEAN = MEAN # np.mean(MEAN, axis=0)
all_data[participant_id]['label_list'].append(label)
single_x = np.hstack(
(FFT.reshape((1, -1)), STD.reshape((1, -1)), MEAN.reshape((1, -1)))
)
all_data[participant_id]['data_list'].append(
single_x
)
print(all_data.keys())
id_list = np.array(list(all_data.keys()))
print(id_list)
kf = KFold(n_splits=5)
reports = []
for train_index, test_index in kf.split(id_list):
train_id_list = id_list[train_index]
test_id_list = id_list[test_index]
print(train_id_list, test_id_list)
X_train = []
y_train = []
X_test = []
y_test = []
for id in list(train_id_list):
X_train += all_data[id]['data_list']
y_train += all_data[id]['label_list']
for id in list(test_id_list):
X_test += all_data[id]['data_list']
y_test += all_data[id]['label_list']
print(len(X_train), len(y_train))
print(len(X_test), len(y_test))
X_train = np.array(X_train)
X_test = np.array(X_test)
X_train = reshape_after_division(X_train)
X_test = reshape_after_division(X_test)
from sklearn.utils.class_weight import compute_class_weight
class_weights = compute_class_weight('balanced', [0,1], y_train)
dnn = Fidgeting_DNN(input_dim=[41, 76, 76], num_classes=2)
dnn.build_multi_class_model()
print('class_weights:', class_weights)
dnn.train_multi_class_model(X_train,
y_train,
X_test,
y_test,
class_weight=class_weights)
dnn.evaluate_multi_class(X_train, y_train)
reports.append(dnn.evaluate_multi_class(X_test, y_test))
return
# dnn.save_model(
# os.path.join(DATA_FOLDER, 'pre-trained', 'hierarchical_DNN.h5')
# )
# dnn = Fidgeting_DNN(input_dim=X_train.shape[1], num_classes=2)
# dnn.build_model()
# dnn.train_model(X_train, y_train, X_dev, y_dev, class_weight={0: 1, 1: 5})
# dnn.evaluate(X_train, y_train)
# dnn.evaluate(X_dev, y_dev)
# dnn.evaluate(X_test, y_test)
def single_hand_fidgeting_training_DNN(self):
from sklearn.model_selection import train_test_split
data = {}
for root, dirs, files in os.walk(os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow_label')):
for file in files:
if '.npy' in file:
data[file] = np.load(os.path.join(root, file))
X = []
y = []
label_data_collection = {}
for file_name in data.keys():
participant_id, session_id, starting, ending = decompose_string(file_name)
sub_data = data[file_name]
label_file_path = os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow_label',
file_name.replace('.npy', '.label1'))
if not os.path.exists(label_file_path):
continue
print(file_name)
with open(label_file_path, 'r') as f:
label = f.read()
label = int(label)
if label == 2:
label = 1
if label == -1:
continue
FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(sub_data))
FFT = np.mean(FFT, axis=1)
STD = STD # np.mean(STD)
MEAN = MEAN # np.mean(MEAN, axis=0)
# ratio = np.count_nonzero(label_hand_cross_dynamic_rhythmic[starting:ending, :]) / (ending - starting)
# if ratio >= 0.8:
# y.append(1)
# else:
# y.append(0)
y.append(label)
single_x = np.hstack(
(FFT.reshape((1, -1)), STD.reshape((1, -1)), MEAN.reshape((1, -1)))
)
X.append(
single_x
)
print(y)
X = np.array(X)
print(X.shape)
# divide partition
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
X_train, X_dev, y_train, y_dev = train_test_split(X_train, y_train, test_size=0.25)
def reshape_after_division(X):
return X.reshape((X.shape[0], X.shape[2]))
X_train = reshape_after_division(X_train)
X_dev = reshape_after_division(X_dev)
X_test = reshape_after_division(X_test)
dnn = Fidgeting_DNN(input_dim=[41, 38, 38], num_classes=2)
dnn.build_multi_class_model()
dnn.train_multi_class_model(X_train,
y_train,
X_dev,
y_dev, class_weight={0: 1, 1: 1.1})
dnn.evaluate_multi_class(X_train, y_train)
dnn.evaluate_multi_class(X_dev, y_dev)
dnn.evaluate_multi_class(X_test, y_test)
dnn.save_model(
os.path.join(DATA_FOLDER, 'pre-trained', 'hierarchical_DNN_hand.h5')
)
# dnn = Fidgeting_DNN(input_dim=X_train.shape[1], num_classes=2)
# dnn.build_model()
# dnn.train_model(X_train, y_train, X_dev, y_dev, class_weight={0: 1, 1: 5})
# dnn.evaluate(X_train, y_train)
# dnn.evaluate(X_dev, y_dev)
# dnn.evaluate(X_test, y_test)
def foot_fidgeting_training(self):
from sklearn.model_selection import train_test_split
data = {}
for root, dirs, files in os.walk(os.path.join(DATA_FOLDER, 'leg_action_analysis_optical_flow_label')):
for file in files:
if '.npy' in file:
data[file] = np.load(os.path.join(root, file))
X = []
y = []
label_data_collection = {}
for file_name in data.keys():
participant_id, session_id, starting, ending = decompose_string(file_name)
sub_data = data[file_name]
label_file_path = os.path.join(DATA_FOLDER, 'leg_action_analysis_optical_flow_label',
file_name.replace('.npy', '.label1'))
if not os.path.exists(label_file_path):
continue
with open(label_file_path, 'r') as f:
label = f.read()
# print(sub_data.shape)
key_name = '{}_{}'.format(participant_id, session_id)
FFT, STD, MEAN = self.analyse_sequence_new(self.get_first_derivative(sub_data))
FFT = np.mean(FFT, axis=1)
STD = STD # np.mean(STD)
MEAN = MEAN # np.mean(MEAN, axis=0)
if label != '-1':
y.append(label)
single_x = np.hstack(
(FFT.reshape((1, -1)), STD.reshape((1, -1)), MEAN.reshape((1, -1)))
)
X.append(
single_x
)
print(y)
return
# print(X)
X = np.array(X)
X = X.reshape((X.shape[0], X.shape[2]))
print(X.shape)
# divide partition
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
X_train, X_dev, y_train, y_dev = train_test_split(X_train, y_train, test_size=0.25)
def reshape_after_division(X):
return X.reshape((X.shape[0], X.shape[2]))
# X_train = reshape_after_division(X_train)
# X_dev = reshape_after_division(X_dev)
# X_test = reshape_after_division(X_test)
dnn = Fidgeting_DNN(input_dim=[41, 12, 12], num_classes=3)
dnn.build_multi_class_model()
dnn.train_multi_class_model(X_train,
y_train,
X_dev,
y_dev, class_weight={0: 1, 1: 3, 2: 3})
dnn.evaluate_multi_class(X_train, y_train)
dnn.evaluate_multi_class(X_dev, y_dev)
dnn.evaluate_multi_class(X_test, y_test)
dnn.save_model(
os.path.join(DATA_FOLDER, 'pre-trained', 'hierarchical_DNN_leg.h5')
)
# dnn = Fidgeting_DNN(input_dim=X_train.shape[1])
# dnn.build_model()
# dnn.train_model(X_train, y_train, X_dev, y_dev, class_weight={0: 1, 1: 50})
# dnn.evaluate(X_train, y_train)
# dnn.evaluate(X_dev, y_dev)
# dnn.evaluate(X_test, y_test)
#
# lstm = Fidgeting_LSTM(data_dim=X.shape[2], timesteps=X.shape[1], num_classes=3)
# lstm.build_model()
# lstm.train_model(X_train, y_train, X_dev, y_dev)
# lstm.evaluate(X_train, y_train)
# lstm.evaluate(X_dev, y_dev)
# lstm.evaluate(X_test, y_test)
'''
DEMO
'''
    def show_demo(self, play_starting=0, save_video=False):
        '''
        Play (or export) an annotated demo video that overlays every detected
        behaviour label (hand cross, hand/leg fidgeting, leg cross, speaking)
        on the participant footage. Fused label data is loaded from
        self.path_data['fused_data'] when it exists, otherwise recomputed
        from the individual analysers and saved there.
        :param play_starting: int, frame index to start playback from
        :param save_video: bool, write annotated frames to
            self.path_data['generated_demo_video'] instead of displaying them
        :return: None
        '''
        if save_video:
            if os.path.exists(self.path_data['generated_demo_video']):
                print('video file exists, move on.')
                return
        cap = cv2.VideoCapture(self.video_path)
        data = np.load(self.processed_file)  # per-frame pose keypoints, one row per frame
        fps = cap.get(cv2.CAP_PROP_FPS)
        print('fps:', fps)
        length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
        print(length)
        fused_data_path = self.path_data['fused_data']
        # NOTE(review): uses module-level participant_id / session_id here while
        # the rest of this method uses self.participant_id — confirm intended.
        instance_hand_cross_analyser = HandCrossAnalyser('test', participant_data[participant_id][session_id])
        continuous_segments, hand_cross_valid_intersect_data = instance_hand_cross_analyser.compute_stationary_rectangles(
            cutoff=0, min_length=20)
        best_rects = {}
        for segment in continuous_segments:
            # find largest rectangle for each segment
            rects = [hand_cross_valid_intersect_data[i] for i in hand_cross_valid_intersect_data.keys()
                     if i >= segment[0] and i < segment[1]]
            rects = np.array(rects)
            # union box: min of the top-left corners, max of the bottom-right corners
            best_rect = np.hstack((np.min(rects, axis=0)[:2], np.max(rects, axis=0)[2:]))
            for i in range(segment[0], segment[1]):
                best_rects[i] = best_rect
        print(continuous_segments)
        if os.path.exists(fused_data_path):
            # Read fused data directly
            fused_data = np.load(fused_data_path)
            label_array = fused_data[:, 0].reshape((-1, 1)) # H2H + fidgeting
            left_hand_arm_label_array = fused_data[:, 1].reshape((-1, 1))
            left_hand_leg_label_array = fused_data[:, 2].reshape((-1, 1))
            left_hand_face_label_array = fused_data[:, 3].reshape((-1, 1))
            right_hand_arm_label_array = fused_data[:, 4].reshape((-1, 1))
            right_hand_leg_label_array = fused_data[:, 5].reshape((-1, 1))
            right_hand_face_label_array = fused_data[:, 6].reshape((-1, 1))
            leg_location_label_array = fused_data[:, 7].reshape((-1, 1))
            leg_action_label_array = fused_data[:, 8].reshape((-1, 1))
            hand_action_label_array = fused_data[:, 9:11]  # shape (n, 2) NOTE!
            speaker_array = fused_data[:, 11].reshape((-1, 1))
            voice_array = fused_data[:, 12].reshape((-1, 1))
        else:
            # compute all necessary data
            # START!~~~
            ############################
            # processing hand cross info
            hand_cross_label_data = json.load(open(
                os.path.join(DATA_FOLDER, 'hand_cross_analysis_optical_flow', 'optical_flow_result.json'),
                'r'))
            try:
                hand_cross_label_data = hand_cross_label_data[str(self.participant_id)][str(self.session_id)]
            except Exception as e:
                print('no hands playing data...')
                hand_cross_label_data = {}
            window_size = 100
            window_step = 50
            # generate label array
            label_array = np.zeros((data.shape[0], 1))
            # map each labelled segment to its centre frame index
            label_centroid = {}
            for segment in hand_cross_label_data.keys():
                starting = int(segment.split(',')[0])
                ending = int(segment.split(',')[1])
                centroid = int(math.floor((starting + ending) / 2))
                # p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
                label_centroid[centroid] = hand_cross_label_data[segment]
            # print(label_centroid)
            print('preprocessing hand cross data')
            # for every frame, take the label of the nearest segment centroid
            # within half a window on either side
            for t in range(data.shape[0]):
                related_centroids = [(i, label_centroid[i])
                                     for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
                                     if i in label_centroid.keys()]
                if len(related_centroids) == 0:
                    continue
                if len(related_centroids) == 1:
                    closest_centroid = related_centroids[0]
                else:
                    # at most two centroids can fall in the window (step = 50)
                    id_1 = related_centroids[0][0]
                    id_2 = related_centroids[1][0]
                    if abs(id_1 - t) < abs(id_2 - t):
                        closest_centroid = related_centroids[0]
                    else:
                        closest_centroid = related_centroids[1]
                # print(closest_centroid)
                label = closest_centroid[1]
                label_array[t, 0] = label
            # print(label_array)
            ############################
            # processing leg location info
            print('computing leg intersection...')
            instance_leg = LegLocationAnalyser(self.name, self.path_data)
            _, leg_intersect_data = instance_leg.compute_leg_intersection(cutoff=0, min_length=20)
            leg_location_label_array = np.zeros((data.shape[0], 1))
            for frame_index in leg_intersect_data.keys():
                leg_location_label_array[frame_index, 0] = 1
            print('computing left hand location...')
            instance_left_hand = HandLocationAnalyser(self.name, self.path_data, hand='left')
            left_hand_arm_continuous_segments, left_hand_leg_continuous_segments, left_hand_face_continuous_segments = \
                instance_left_hand.compute_hand_intersection(cutoff=0, min_length=80)
            left_hand_arm_label_array = self.transfer_to_array(left_hand_arm_continuous_segments)
            left_hand_leg_label_array = self.transfer_to_array(left_hand_leg_continuous_segments)
            left_hand_face_label_array = self.transfer_to_array(left_hand_face_continuous_segments)
            print('computing right hand location...')
            instance_right_hand = HandLocationAnalyser(self.name, self.path_data, hand='right')
            right_hand_arm_continuous_segments, right_hand_leg_continuous_segments, right_hand_face_continuous_segments = \
                instance_right_hand.compute_hand_intersection(cutoff=0, min_length=80)
            right_hand_arm_label_array = self.transfer_to_array(right_hand_arm_continuous_segments)
            right_hand_leg_label_array = self.transfer_to_array(right_hand_leg_continuous_segments)
            right_hand_face_label_array = self.transfer_to_array(right_hand_face_continuous_segments)
            ############################
            # processing leg action info
            leg_action_label_data = json.load(open(
                os.path.join(DATA_FOLDER, 'leg_action_analysis_optical_flow', 'optical_flow_result.json'),
                'r'))
            try:
                leg_action_label_data = leg_action_label_data[str(self.participant_id)][str(self.session_id)]
            except Exception as e:
                print('no leg action data...')
                leg_action_label_data = {}
            print(leg_action_label_data)
            # generate label array (same nearest-centroid scheme as hand cross above)
            leg_action_label_array = np.zeros((data.shape[0], 1))
            label_centroid = {}
            for segment in leg_action_label_data.keys():
                starting = int(segment.split(',')[0])
                ending = int(segment.split(',')[1])
                centroid = int(math.floor((starting + ending) / 2))
                # p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
                label_centroid[centroid] = leg_action_label_data[segment]
            print('preprocessing leg action data')
            for t in range(data.shape[0]):
                related_centroids = [(i, label_centroid[i])
                                     for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
                                     if i in label_centroid.keys()]
                if len(related_centroids) == 0:
                    continue
                if len(related_centroids) == 1:
                    closest_centroid = related_centroids[0]
                else:
                    id_1 = related_centroids[0][0]
                    id_2 = related_centroids[1][0]
                    if abs(id_1 - t) < abs(id_2 - t):
                        closest_centroid = related_centroids[0]
                    else:
                        closest_centroid = related_centroids[1]
                # print(closest_centroid)
                label = closest_centroid[1]
                leg_action_label_array[t, 0] = label
            # print(leg_action_label_array)
            ############################
            # processing hand action info
            hand_action_label_data = json.load(open(
                os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow', 'optical_flow_result.json'),
                'r'))
            try:
                hand_action_label_data = hand_action_label_data[str(self.participant_id)][str(self.session_id)]
            except Exception as e:
                print('no hands action data...')
                hand_action_label_data = {}
            window_size = 100
            window_step = 50
            # generate label array: column 0 = left hand, column 1 = right hand
            hand_action_label_array = np.zeros((data.shape[0], 2))
            for hand in hand_action_label_data.keys():
                label_centroid = {}
                for segment in hand_action_label_data[hand].keys():
                    starting = int(segment.split(',')[0])
                    ending = int(segment.split(',')[1])
                    centroid = int(math.floor((starting + ending) / 2))
                    # p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
                    label_centroid[centroid] = hand_action_label_data[hand][segment]
                # print(label_centroid)
                print('preprocessing hand action data')
                for t in range(data.shape[0]):
                    related_centroids = [(i, label_centroid[i])
                                         for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
                                         if i in label_centroid.keys()]
                    if len(related_centroids) == 0:
                        continue
                    if len(related_centroids) == 1:
                        closest_centroid = related_centroids[0]
                    else:
                        id_1 = related_centroids[0][0]
                        id_2 = related_centroids[1][0]
                        if abs(id_1 - t) < abs(id_2 - t):
                            closest_centroid = related_centroids[0]
                        else:
                            closest_centroid = related_centroids[1]
                    # print(closest_centroid)
                    label = closest_centroid[1]
                    if hand == 'left':
                        hand_action_label_array[t, 0] = label
                    else:
                        hand_action_label_array[t, 1] = label
            # print(hand_action_label_array)
            ############################
            # processing speaker info
            print('Reading speaker info...')
            speaker_data = json.load(open(self.path_data['speaker_data'], 'r'))
            # -1 marks frames with no attributed speaker
            speaker_array = np.full((data.shape[0], 1), -1)
            for spk in speaker_data.keys():
                sub_spk_data = speaker_data[spk]
                spk = int(spk)
                # print(spk, '--->')
                for segment in sub_spk_data:
                    starting = segment[0]
                    ending = segment[1]
                    # segment boundaries are in milliseconds -> convert to frames
                    starting = math.floor(starting / 1000 * fps)
                    starting = min(starting, speaker_array.shape[0])
                    ending = math.ceil(ending / 1000 * fps)
                    ending = min(ending, speaker_array.shape[0])
                    speaker_array[starting:ending, :] = spk
                    # print(starting, ending)
            speaker_array = speaker_array.reshape((-1, 1))
            ############################
            # processing voice info
            voice_data = json.load(open(self.path_data['voice_data'], 'r'))
            voice_array = np.full((data.shape[0], 1), 0)
            for segment in voice_data:
                starting = segment[0]
                ending = segment[1]
                # voice segment boundaries are in seconds -> convert to frames
                starting = math.floor(starting * fps)
                starting = min(starting, voice_array.shape[0])
                ending = math.ceil(ending * fps)
                ending = min(ending, voice_array.shape[0])
                voice_array[starting:ending, :] = 1
                print(starting, ending)
            # plt.plot(range(data.shape[0]), list(voice_array.reshape(-1)))
            # plt.show()
            # smooth the binary voice signal then re-threshold at 0.3
            voice_array = np.array(savgol_filter(list(voice_array.reshape(-1)), 51, 3)).reshape((-1, 1))
            voice_array[voice_array >= 0.3] = 1
            voice_array[voice_array < 0.3] = 0
            # plt.plot(range(data.shape[0]), list(voice_array.reshape(-1)))
            # plt.show()
            ############################
            # Data Fusion and save (column order must match the reader branch above)
            print(label_array.shape)
            print(left_hand_arm_label_array.shape)
            print(left_hand_leg_label_array.shape)
            print(left_hand_face_label_array.shape)
            print(right_hand_arm_label_array.shape)
            print(right_hand_leg_label_array.shape)
            print(right_hand_face_label_array.shape)
            print(leg_action_label_array.shape)
            print(leg_location_label_array.shape)
            print(hand_action_label_array.shape)
            print(speaker_array.shape)
            print(voice_array.shape)
            fused_data = np.hstack(
                (
                    label_array, # hand cross data
                    left_hand_arm_label_array,
                    left_hand_leg_label_array,
                    left_hand_face_label_array,
                    right_hand_arm_label_array,
                    right_hand_leg_label_array,
                    right_hand_face_label_array,
                    leg_location_label_array,
                    leg_action_label_array,
                    hand_action_label_array, # shape (n, 2) NOTE!
                    speaker_array,
                    voice_array,
                )
            )
            np.save(fused_data_path, fused_data)
        #############################################
        # some preprocessing of data
        # "hand-to-X fidgeting" = hand-action label 1 AND the hand is located on X
        left_hand_to_leg_fidget_array = np.zeros((data.shape[0], 1))
        left_hand_to_leg_fidget_array[
            (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_leg_label_array == 1)] = 1
        right_hand_to_leg_fidget_array = np.zeros((data.shape[0], 1))
        right_hand_to_leg_fidget_array[
            (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_leg_label_array == 1)] = 1
        left_hand_to_arm_fidget_array = np.zeros((data.shape[0], 1))
        left_hand_to_arm_fidget_array[
            (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_arm_label_array == 1)] = 1
        right_hand_to_arm_fidget_array = np.zeros((data.shape[0], 1))
        right_hand_to_arm_fidget_array[
            (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_arm_label_array == 1)] = 1
        left_hand_to_face_fidget_array = np.zeros((data.shape[0], 1))
        left_hand_to_face_fidget_array[
            (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_face_label_array == 1)] = 1
        right_hand_to_face_fidget_array = np.zeros((data.shape[0], 1))
        right_hand_to_face_fidget_array[
            (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_face_label_array == 1)] = 1
        # NOTE(review): this aliases leg_action_label_array, so the in-place
        # clamp on the next line also mutates leg_action_label_array — confirm intended.
        leg_fidget_array = leg_action_label_array
        leg_fidget_array[leg_fidget_array>1] = 1
        # smooth the binary signal then re-threshold at 0.8
        leg_fidget_array = np.array(savgol_filter(leg_fidget_array.reshape(-1).tolist(), 51, 3)).reshape((-1, 1))
        leg_fidget_array[leg_fidget_array >= 0.8] = 1
        leg_fidget_array[leg_fidget_array < 0.8] = 0
        #############################################
        if not save_video:
            input('ready to play!')
        # 1 == cv2.CAP_PROP_POS_FRAMES: seek to the requested start frame
        cap.set(1, play_starting)
        t = play_starting
        if save_video:
            fourcc = cv2.VideoWriter_fourcc(*'XVID')
            out = cv2.VideoWriter(self.path_data['generated_demo_video'], fourcc, cap.get(cv2.CAP_PROP_FPS), (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
        while (t < data.shape[0]):
            print('video analysing in progress:', t / data.shape[0], end='\r')
            ret, frame = cap.read()
            # Display all the data points
            # NOTE(review): bare except silently falls back to the generic
            # speaker caption on any error (e.g. missing speaker_label_data key)
            try:
                if int(speaker_array[t]) in speaker_label_data[self.participant_id][self.session_id]:
                    if voice_array[t, 0] == 1:
                        frame = self.paint_text(frame, 'Participant Speaking', (350, 200), font_size=1)
                # elif int(speaker_array[t]) == -1:
                #     frame = self.paint_text(frame, 'Unknown', (400, 200), font_size=1)
                # else:
                #     frame = self.paint_text(frame, 'Interviewer speaking', (400, 200), font_size=1)
            except:
                frame = self.paint_text(frame,
                                        '{} of {} speaking'.format(str(speaker_array[t]), len(speaker_data.keys()) - 1),
                                        (100, 100))
            # for i in range(25):
            #     frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]])
            # for i in range(25, 95):
            #     frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_BLUE)
            # for i in range(95, 116):
            #     frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_GREEN)
            # for i in range(116, 137):
            #     frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_YELLOW)
            # fixed keypoint column slices of the pose array, reshaped to (k, 2) xy pairs
            left_hand_data = data[t, 194:232].reshape(-1, 2)
            right_hand_data = data[t, 236:274].reshape(-1, 2)
            face_data = data[t, 50:190].reshape(-1, 2)
            left_foot_data = data[t, 38:44].reshape(-1, 2)
            right_foot_data = data[t, 44:50].reshape(-1, 2)
            # frame = self.paint_rectangle_to_points(frame, left_hand_data, color=COLOR_GREEN)
            # frame = self.paint_rectangle_to_points(frame, right_hand_data, color=COLOR_YELLOW)
            if leg_location_label_array[t, 0] == 1:
                frame = self.paint_text(frame, 'Leg cross', (790, 630), font_size=1)
            if leg_action_label_array[t, 0] == 2:
                frame = self.paint_text(frame, 'Leg dynamic', (790, 560), font_size=1)
                #frame = self.paint_rectangle_to_points(frame, [(540, 430), (790, 630)], color=color)
            if leg_fidget_array[t, 0] == 1:
                for i in range(25):
                    frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_RED)
                frame = self.paint_text(frame, 'Leg Fidgeting', (790, 500), font_size=0.5)
            if t in hand_cross_valid_intersect_data.keys():
                # print(t)
                # hands are crossed this frame: draw the union box, red when fidgeting
                rect = best_rects[t]
                label = label_array[t, 0]
                # print(label)
                color = COLOR_YELLOW
                if label == 1:
                    color = COLOR_RED
                    frame = self.paint_text(frame, 'Hand Cross Fidgeting', (rect[2], rect[3]), font_size=0.5)
                else:
                    color = COLOR_GREEN
                    frame = self.paint_text(frame, 'Hand Cross', (rect[2], rect[3]), font_size=0.5)
                frame = cv2.rectangle(frame,
                                      (rect[0] - 10, rect[1] - 10),
                                      (rect[2] + 10, rect[3] + 10),
                                      color,
                                      2)
            else:
                # hands apart: annotate each hand's fidgeting target independently
                left_hand_action_label = hand_action_label_array[t, 0]
                right_hand_action_label = hand_action_label_array[t, 1]
                flag_left = ''
                flag_right = ''
                if left_hand_action_label == 1:
                    if left_hand_to_leg_fidget_array[t, 0] == 1:
                        flag_left = 'Left hand to leg fidgeting'
                    if left_hand_to_arm_fidget_array[t, 0] == 1:
                        flag_left = 'Left hand to arm fidgeting'
                    if left_hand_to_face_fidget_array[t, 0] == 1:
                        flag_left = 'Left hand to face'
                if right_hand_action_label == 1:
                    if right_hand_to_leg_fidget_array[t, 0] == 1:
                        flag_right = 'Right hand to leg fidgeting'
                    if right_hand_to_arm_fidget_array[t, 0] == 1:
                        flag_right = 'Right hand to arm fidgeting'
                    if right_hand_to_face_fidget_array[t, 0] == 1:
                        flag_right = 'Right hand to face'
                if flag_left:
                    frame = self.paint_rectangle_to_points(frame, left_hand_data, color=COLOR_RED)
                    frame = self.paint_text(frame, flag_left, (400, 400), font_size=0.5)
                if flag_right:
                    frame = self.paint_rectangle_to_points(frame, right_hand_data, color=COLOR_RED)
                    frame = self.paint_text(frame, flag_right, (400, 450), font_size=0.5)
            if save_video:
                out.write(frame)
            else:
                cv2.imshow('frame', frame)
                if cv2.waitKey(40) & 0xFF == ord('q'):
                    break
            # if t == starting:
            #     input()
            t += 1
        # except Exception as e:
        #     print(e)
        if save_video:
            out.release()
        cap.release()
        cv2.destroyAllWindows()
def show_actor_demo(self, play_starting=0, save_video=False):
'''
Play back (or export) an annotated demo video for this session.

Overlays fidgeting annotations (hand-cross, hand-to-leg/arm/face, leg
fidgeting) on each video frame, using a cached fused-data array when one
exists and recomputing all label arrays from the per-modality analysers
otherwise.

:param play_starting: int, frame index to start playback/export from
:param save_video: bool, if True write frames to
    self.path_data['generated_demo_video'] instead of showing a window
:return: None
'''
if save_video:
create_dirs([os.path.split(self.path_data['generated_demo_video'])[0]])
print(self.path_data['generated_demo_video'])
# Skip sessions whose demo video has already been rendered.
if os.path.exists(self.path_data['generated_demo_video']):
print('video file exists, move on.')
return
cap = cv2.VideoCapture(self.video_path)
# data: per-frame keypoint array; columns are interleaved x,y pairs
# (slicing below assumes body 0:50, face 50:190, hands 194:232 / 236:274).
data = np.load(self.processed_file)
fps = cap.get(cv2.CAP_PROP_FPS)
print('fps:', fps)
length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
print(length)
fused_data_path = self.path_data['fused_data']
create_dirs([os.path.split(fused_data_path)[0]])
# NOTE(review): `participant_id`/`session_id` are not parameters or
# attributes here — this relies on module-level globals (e.g. leftovers
# from the __main__ loop). Presumably should be self.participant_id /
# self.session_id; confirm before reusing outside the script entry point.
instance_hand_cross_analyser = HandCrossAnalyser('test', participant_data[participant_id][session_id])
continuous_segments, hand_cross_valid_intersect_data = instance_hand_cross_analyser.compute_stationary_rectangles(
cutoff=0, min_length=20)
best_rects = {}
for segment in continuous_segments:
# find largest rectangle for each segment
rects = [hand_cross_valid_intersect_data[i] for i in hand_cross_valid_intersect_data.keys()
if i >= segment[0] and i < segment[1]]
rects = np.array(rects)
# Envelope rectangle: min of top-left corners, max of bottom-right corners.
best_rect = np.hstack((np.min(rects, axis=0)[:2], np.max(rects, axis=0)[2:]))
for i in range(segment[0], segment[1]):
best_rects[i] = best_rect
print(continuous_segments)
if os.path.exists(fused_data_path):
# Read fused data directly
fused_data = np.load(fused_data_path)
label_array = fused_data[:, 0].reshape((-1, 1))  # H2H + fidgeting
left_hand_arm_label_array = fused_data[:, 1].reshape((-1, 1))
left_hand_leg_label_array = fused_data[:, 2].reshape((-1, 1))
left_hand_face_label_array = fused_data[:, 3].reshape((-1, 1))
right_hand_arm_label_array = fused_data[:, 4].reshape((-1, 1))
right_hand_leg_label_array = fused_data[:, 5].reshape((-1, 1))
right_hand_face_label_array = fused_data[:, 6].reshape((-1, 1))
leg_location_label_array = fused_data[:, 7].reshape((-1, 1))
leg_action_label_array = fused_data[:, 8].reshape((-1, 1))
hand_action_label_array = fused_data[:, 9:11]  # shape (n, 2) NOTE!
# speaker_array = fused_data[:, 11].reshape((-1, 1))
# voice_array = fused_data[:, 12].reshape((-1, 1))
else:
# compute all necessary data
# START!~~~
############################
# processing hand cross info
hand_cross_label_data = json.load(open(
os.path.join(DATA_FOLDER, 'hand_cross_analysis_optical_flow', 'optical_flow_result.json'),
'r'))
try:
hand_cross_label_data = hand_cross_label_data[str(self.participant_id)][str(self.session_id)]
except Exception as e:
print('no hands playing data...')
hand_cross_label_data = {}
window_size = 100
window_step = 50
# generate label array
label_array = np.zeros((data.shape[0], 1))
# Map each labelled "start,end" segment to its centre frame so a frame can
# be tagged with the label of the nearest segment centroid.
label_centroid = {}
for segment in hand_cross_label_data.keys():
starting = int(segment.split(',')[0])
ending = int(segment.split(',')[1])
centroid = int(math.floor((starting + ending) / 2))
# p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
label_centroid[centroid] = hand_cross_label_data[segment]
# print(label_centroid)
print('preprocessing hand cross data')
for t in range(data.shape[0]):
related_centroids = [(i, label_centroid[i])
for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
if i in label_centroid.keys()]
if len(related_centroids) == 0:
continue
if len(related_centroids) == 1:
closest_centroid = related_centroids[0]
else:
# NOTE(review): only the first two centroids in the window are
# compared; if three or more fall inside the window the rest are
# ignored — confirm that segments are spaced > window_size/2 apart.
id_1 = related_centroids[0][0]
id_2 = related_centroids[1][0]
if abs(id_1 - t) < abs(id_2 - t):
closest_centroid = related_centroids[0]
else:
closest_centroid = related_centroids[1]
# print(closest_centroid)
label = closest_centroid[1]
label_array[t, 0] = label
# print(label_array)
############################
# processing leg location info
print('computing leg intersection...')
instance_leg = LegLocationAnalyser(self.name, self.path_data)
_, leg_intersect_data = instance_leg.compute_leg_intersection(cutoff=0, min_length=20)
leg_location_label_array = np.zeros((data.shape[0], 1))
for frame_index in leg_intersect_data.keys():
leg_location_label_array[frame_index, 0] = 1
print('computing left hand location...')
instance_left_hand = HandLocationAnalyser(self.name, self.path_data, hand='left')
left_hand_arm_continuous_segments, left_hand_leg_continuous_segments, left_hand_face_continuous_segments = \
instance_left_hand.compute_hand_intersection(cutoff=0, min_length=20)
left_hand_arm_label_array = self.transfer_to_array(left_hand_arm_continuous_segments)
left_hand_leg_label_array = self.transfer_to_array(left_hand_leg_continuous_segments)
left_hand_face_label_array = self.transfer_to_array(left_hand_face_continuous_segments)
print('computing right hand location...')
instance_right_hand = HandLocationAnalyser(self.name, self.path_data, hand='right')
right_hand_arm_continuous_segments, right_hand_leg_continuous_segments, right_hand_face_continuous_segments = \
instance_right_hand.compute_hand_intersection(cutoff=0, min_length=20)
right_hand_arm_label_array = self.transfer_to_array(right_hand_arm_continuous_segments)
right_hand_leg_label_array = self.transfer_to_array(right_hand_leg_continuous_segments)
right_hand_face_label_array = self.transfer_to_array(right_hand_face_continuous_segments)
############################
# processing leg action info
leg_action_label_data = json.load(open(
os.path.join(DATA_FOLDER, 'leg_action_analysis_optical_flow', 'optical_flow_result.json'),
'r'))
try:
leg_action_label_data = leg_action_label_data[str(self.participant_id)][str(self.session_id)]
except Exception as e:
print('no leg action data...')
leg_action_label_data = {}
print(leg_action_label_data)
# generate label array
leg_action_label_array = np.zeros((data.shape[0], 1))
# Same centroid-matching scheme as the hand-cross pass above.
label_centroid = {}
for segment in leg_action_label_data.keys():
starting = int(segment.split(',')[0])
ending = int(segment.split(',')[1])
centroid = int(math.floor((starting + ending) / 2))
# p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
label_centroid[centroid] = leg_action_label_data[segment]
print('preprocessing leg action data')
for t in range(data.shape[0]):
related_centroids = [(i, label_centroid[i])
for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
if i in label_centroid.keys()]
if len(related_centroids) == 0:
continue
if len(related_centroids) == 1:
closest_centroid = related_centroids[0]
else:
id_1 = related_centroids[0][0]
id_2 = related_centroids[1][0]
if abs(id_1 - t) < abs(id_2 - t):
closest_centroid = related_centroids[0]
else:
closest_centroid = related_centroids[1]
# print(closest_centroid)
label = closest_centroid[1]
leg_action_label_array[t, 0] = label
# print(leg_action_label_array)
############################
# processing hand action info
hand_action_label_data = json.load(open(
os.path.join(DATA_FOLDER, 'hand_action_analysis_optical_flow', 'optical_flow_result.json'),
'r'))
try:
hand_action_label_data = hand_action_label_data[str(self.participant_id)][str(self.session_id)]
except Exception as e:
print('no hands action data...')
hand_action_label_data = {}
window_size = 100
window_step = 50
# generate label array
# Two columns: [:, 0] = left hand, [:, 1] = right hand (see assignment below).
hand_action_label_array = np.zeros((data.shape[0], 2))
for hand in hand_action_label_data.keys():
label_centroid = {}
for segment in hand_action_label_data[hand].keys():
starting = int(segment.split(',')[0])
ending = int(segment.split(',')[1])
centroid = int(math.floor((starting + ending) / 2))
# p = (centroid, hand_cross_label_data[segment][0], hand_cross_label_data[segment][1])
label_centroid[centroid] = hand_action_label_data[hand][segment]
# print(label_centroid)
print('preprocessing hand action data')
for t in range(data.shape[0]):
related_centroids = [(i, label_centroid[i])
for i in range(int(t - 0.5 * window_size), int(t + 0.5 * window_size))
if i in label_centroid.keys()]
if len(related_centroids) == 0:
continue
if len(related_centroids) == 1:
closest_centroid = related_centroids[0]
else:
id_1 = related_centroids[0][0]
id_2 = related_centroids[1][0]
if abs(id_1 - t) < abs(id_2 - t):
closest_centroid = related_centroids[0]
else:
closest_centroid = related_centroids[1]
# print(closest_centroid)
label = closest_centroid[1]
if hand == 'left':
hand_action_label_array[t, 0] = label
else:
hand_action_label_array[t, 1] = label
# print(hand_action_label_array)
############################
# Data Fusion and save
print(label_array.shape)
print(left_hand_arm_label_array.shape)
print(left_hand_leg_label_array.shape)
print(left_hand_face_label_array.shape)
print(right_hand_arm_label_array.shape)
print(right_hand_leg_label_array.shape)
print(right_hand_face_label_array.shape)
print(leg_action_label_array.shape)
print(leg_location_label_array.shape)
print(hand_action_label_array.shape)
# print(speaker_array.shape)
# print(voice_array.shape)
# Column layout must stay in sync with the np.load branch above
# (and with generate_training_data, which re-reads this file).
fused_data = np.hstack(
(
label_array,  # hand cross data
left_hand_arm_label_array,
left_hand_leg_label_array,
left_hand_face_label_array,
right_hand_arm_label_array,
right_hand_leg_label_array,
right_hand_face_label_array,
leg_location_label_array,
leg_action_label_array,
hand_action_label_array,  # shape (n, 2) NOTE!
# speaker_array,
# voice_array,
)
)
np.save(fused_data_path, fused_data)
#############################################
# some preprocessing of data
# Combined indicators: hand is moving (action == 1) AND resting on the
# corresponding body region.
left_hand_to_leg_fidget_array = np.zeros((data.shape[0], 1))
left_hand_to_leg_fidget_array[
(hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_leg_label_array == 1)] = 1
right_hand_to_leg_fidget_array = np.zeros((data.shape[0], 1))
right_hand_to_leg_fidget_array[
(hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_leg_label_array == 1)] = 1
left_hand_to_arm_fidget_array = np.zeros((data.shape[0], 1))
left_hand_to_arm_fidget_array[
(hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_arm_label_array == 1)] = 1
right_hand_to_arm_fidget_array = np.zeros((data.shape[0], 1))
right_hand_to_arm_fidget_array[
(hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_arm_label_array == 1)] = 1
left_hand_to_face_fidget_array = np.zeros((data.shape[0], 1))
left_hand_to_face_fidget_array[
(hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_face_label_array == 1)] = 1
right_hand_to_face_fidget_array = np.zeros((data.shape[0], 1))
right_hand_to_face_fidget_array[
(hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_face_label_array == 1)] = 1
# NOTE(review): this is an alias, not a copy — the clipping on the next
# line also mutates leg_action_label_array in place.
leg_fidget_array = leg_action_label_array
leg_fidget_array[leg_fidget_array>1] = 1
# Smooth the binary signal, then re-threshold at 0.8 to suppress flicker.
leg_fidget_array = np.array(savgol_filter(leg_fidget_array.reshape(-1).tolist(), 51, 3)).reshape((-1, 1))
leg_fidget_array[leg_fidget_array >= 0.8] = 1
leg_fidget_array[leg_fidget_array < 0.8] = 0
# Drop hand-to-leg fidget runs shorter than 100 frames.
right_hand_to_leg_fidget_array = self.transfer_to_array(self.transfer_to_segments(right_hand_to_leg_fidget_array, min_length=100))
left_hand_to_leg_fidget_array = self.transfer_to_array(
self.transfer_to_segments(left_hand_to_leg_fidget_array, min_length=100))
#############################################
if not save_video:
input('ready to play!')
cap.set(1, play_starting)
t = play_starting
if save_video:
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter(self.path_data['generated_demo_video'], fourcc, cap.get(cv2.CAP_PROP_FPS), (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
while (t < data.shape[0]):
print('video analysing in progress:', t / data.shape[0], end='\r')
# NOTE(review): `ret` is never checked — a truncated video would hand a
# None frame to the painting helpers below.
ret, frame = cap.read()
# Display all the data points
# try:
# if int(speaker_array[t]) in speaker_label_data[self.participant_id][self.session_id]:
# if voice_array[t, 0] == 1:
# frame = self.paint_text(frame, 'Participant Speaking', (350, 200), font_size=1)
# # elif int(speaker_array[t]) == -1:
# # frame = self.paint_text(frame, 'Unknown', (400, 200), font_size=1)
# # else:
# # frame = self.paint_text(frame, 'Interviewer speaking', (400, 200), font_size=1)
# except:
# frame = self.paint_text(frame,
# '{} of {} speaking'.format(str(speaker_array[t]), len(speaker_data.keys()) - 1),
# (100, 100))
# for i in range(25):
# frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]])
# for i in range(25, 95):
# frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_BLUE)
# for i in range(95, 116):
# frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_GREEN)
# for i in range(116, 137):
# frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_YELLOW)
# Keypoint column ranges — presumably OpenPose-style layout; TODO confirm.
left_hand_data = data[t, 194:232].reshape(-1, 2)
right_hand_data = data[t, 236:274].reshape(-1, 2)
face_data = data[t, 50:190].reshape(-1, 2)
left_foot_data = data[t, 38:44].reshape(-1, 2)
right_foot_data = data[t, 44:50].reshape(-1, 2)
# frame = self.paint_rectangle_to_points(frame, left_hand_data, color=COLOR_GREEN)
# frame = self.paint_rectangle_to_points(frame, right_hand_data, color=COLOR_YELLOW)
# if leg_location_label_array[t, 0] == 1:
# frame = self.paint_text(frame, 'Leg cross', (790, 630), font_size=1)
#
# if leg_action_label_array[t, 0] == 2:
# frame = self.paint_text(frame, 'Leg dynamic', (790, 560), font_size=1)
#frame = self.paint_rectangle_to_points(frame, [(540, 430), (790, 630)], color=color)
if leg_fidget_array[t, 0] == 1:
# Highlight the 25 body keypoints in red while legs are fidgeting.
for i in range(25):
frame = self.paint_point(frame, [data[t, i * 2], data[t, i * 2 + 1]], color=COLOR_RED)
frame = self.paint_text(frame, 'Leg Fidgeting', (400, 350), font_size=0.7)
if t in hand_cross_valid_intersect_data.keys():
# print(t)
# Hands are crossed in this frame: draw the (padded) envelope rectangle,
# red if labelled fidgeting, green otherwise.
rect = best_rects[t]
label = label_array[t, 0]
# print(label)
color = COLOR_YELLOW
if label == 1:
color = COLOR_RED
frame = self.paint_text(frame, 'Hand Cross Fidgeting', (rect[2], rect[3]), font_size=0.5)
else:
color = COLOR_GREEN
frame = self.paint_text(frame, 'Hand Cross', (rect[2], rect[3]), font_size=0.5)
frame = cv2.rectangle(frame,
(rect[0] - 10, rect[1] - 10),
(rect[2] + 10, rect[3] + 10),
color,
2)
else:
# Hands not crossed: annotate individual hand-to-body fidgets.
# Later checks overwrite earlier ones, so face > arm > leg in priority.
left_hand_action_label = hand_action_label_array[t, 0]
right_hand_action_label = hand_action_label_array[t, 1]
flag_left = ''
flag_right = ''
if left_hand_action_label == 1:
if left_hand_to_leg_fidget_array[t, 0] == 1:
flag_left = 'Left hand to leg fidgeting'
if left_hand_to_arm_fidget_array[t, 0] == 1:
flag_left = 'Left hand to arm fidgeting'
if left_hand_to_face_fidget_array[t, 0] == 1:
flag_left = 'Left hand to face'
if right_hand_action_label == 1:
if right_hand_to_leg_fidget_array[t, 0] == 1:
flag_right = 'Right hand to leg fidgeting'
if right_hand_to_arm_fidget_array[t, 0] == 1:
flag_right = 'Right hand to arm fidgeting'
if right_hand_to_face_fidget_array[t, 0] == 1:
flag_right = 'Right hand to face'
if flag_left:
frame = self.paint_rectangle_to_points(frame, left_hand_data, color=COLOR_RED)
frame = self.paint_text(frame, flag_left, (400, 400), font_size=0.5)
if flag_right:
frame = self.paint_rectangle_to_points(frame, right_hand_data, color=COLOR_RED)
frame = self.paint_text(frame, flag_right, (400, 450), font_size=0.5)
if save_video:
out.write(frame)
else:
# Live playback at ~25 fps; 'q' quits early.
cv2.imshow('frame', frame)
if cv2.waitKey(40) & 0xFF == ord('q'):
break
# if t == starting:
# input()
t += 1
# except Exception as e:
# print(e)
if save_video:
out.release()
cap.release()
cv2.destroyAllWindows()
def generate_training_data(self):
    """Build per-session fused training data and pickle it with labels.

    For every participant/session pair (participant 109 excluded), loads
    the previously saved fused-data array, derives per-hand fidget
    indicator columns, recombines the label columns, and dumps
    ``{'data': fused_data, 'label': <depression label>}`` to the
    session's ``'full_fused_data'`` path.

    Sessions that fail for any reason are logged and skipped so one bad
    session does not abort the whole sweep (previously a bare
    ``except: pass`` hid all failures, including KeyboardInterrupt).
    """
    # slicing videos and assign labels
    for participant_id in participant_data.keys():
        if participant_id in [109]:
            # participant 109 is deliberately excluded from training data
            continue
        for session_id in participant_data[participant_id].keys():
            try:
                sub_pipeline = MainPipeline('test', participant_data[participant_id][session_id])
                print('working on', participant_id, session_id)
                fused_data_path = sub_pipeline.path_data['fused_data']
                print(fused_data_path)
                fused_data = np.load(fused_data_path)
                print(fused_data.shape)
                # Unpack the fused-data columns (layout written by show_actor_demo).
                hand_cross_fidget_label_array = fused_data[:, 0].reshape((-1, 1))  # hand cross fidget data
                left_hand_arm_label_array = fused_data[:, 1].reshape((-1, 1))
                left_hand_leg_label_array = fused_data[:, 2].reshape((-1, 1))
                left_hand_face_label_array = fused_data[:, 3].reshape((-1, 1))
                right_hand_arm_label_array = fused_data[:, 4].reshape((-1, 1))
                right_hand_leg_label_array = fused_data[:, 5].reshape((-1, 1))
                right_hand_face_label_array = fused_data[:, 6].reshape((-1, 1))
                leg_location_label_array = fused_data[:, 7].reshape((-1, 1))
                leg_action_label_array = fused_data[:, 8].reshape((-1, 1))
                hand_action_label_array = fused_data[:, 9:11]  # shape (n, 2) NOTE!
                speaker_array = fused_data[:, 11].reshape((-1, 1))
                voice_array = fused_data[:, 12].reshape((-1, 1))
                # Combined indicators: hand moving (action == 1) AND resting on
                # the corresponding body region. Currently only the disabled
                # speaking-gated variant below consumes these, but they are kept
                # so the try-block's failure behavior is unchanged.
                left_hand_to_leg_fidget_array = np.zeros((fused_data.shape[0], 1))
                left_hand_to_leg_fidget_array[
                    (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_leg_label_array == 1)] = 1
                right_hand_to_leg_fidget_array = np.zeros((fused_data.shape[0], 1))
                right_hand_to_leg_fidget_array[
                    (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_leg_label_array == 1)] = 1
                left_hand_to_arm_fidget_array = np.zeros((fused_data.shape[0], 1))
                left_hand_to_arm_fidget_array[
                    (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_arm_label_array == 1)] = 1
                right_hand_to_arm_fidget_array = np.zeros((fused_data.shape[0], 1))
                right_hand_to_arm_fidget_array[
                    (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_arm_label_array == 1)] = 1
                left_hand_to_face_fidget_array = np.zeros((fused_data.shape[0], 1))
                left_hand_to_face_fidget_array[
                    (hand_action_label_array[:, 0].reshape((-1, 1)) == 1) & (left_hand_face_label_array == 1)] = 1
                right_hand_to_face_fidget_array = np.zeros((fused_data.shape[0], 1))
                right_hand_to_face_fidget_array[
                    (hand_action_label_array[:, 1].reshape((-1, 1)) == 1) & (right_hand_face_label_array == 1)] = 1
                # NOTE: alias, not a copy — the in-place clip below also mutates
                # leg_action_label_array, which feeds the hstack further down.
                # Preserved as-is to keep output identical to the original.
                leg_fidget_array = leg_action_label_array
                leg_fidget_array[leg_fidget_array > 1] = 1
                # Smooth the binary signal, then re-threshold to suppress flicker.
                leg_fidget_array = np.array(savgol_filter(leg_fidget_array.reshape(-1).tolist(), 51, 3)).reshape(
                    (-1, 1))
                leg_fidget_array[leg_fidget_array >= 0.8] = 1
                leg_fidget_array[leg_fidget_array < 0.8] = 0
                # 1 where the participant (a known speaker label) is speaking
                # and voice activity is detected.
                speaking_array = np.zeros((fused_data.shape[0], 1))
                for speaker_label in speaker_label_data[participant_id][session_id]:
                    speaking_array[(speaker_array == speaker_label)] = 1
                speaking_array[voice_array == 0] = 0
                # A speaking-gated fused_data variant existed here but is
                # intentionally disabled; the raw label columns are used instead.
                fused_data = np.hstack((
                    hand_cross_fidget_label_array,
                    left_hand_arm_label_array,
                    left_hand_leg_label_array,
                    left_hand_face_label_array,
                    right_hand_arm_label_array,
                    right_hand_leg_label_array,
                    right_hand_face_label_array,
                    leg_location_label_array,
                    leg_action_label_array,
                    hand_action_label_array,  # shape (n, 2) NOTE!
                    speaker_array,
                    voice_array,
                ))
                print('processed:', fused_data.shape)
                processed_data = np.load(sub_pipeline.processed_smooth_file)
                print(processed_data.shape)
                gaze_data = processed_data[:, list(range(274, 282))]
                AUs_data = processed_data[:, list(range(948, 983))]
                gaze_data = stats.zscore(gaze_data, axis=1, ddof=1)
                # Assembled but not saved; kept so any exception raised here
                # still skips the session exactly as before.
                training_data = np.hstack((fused_data, gaze_data, AUs_data))
                full_fused_data = {
                    'data': fused_data,
                    'label': participant_depression_data[participant_id],
                }
                # `with` closes the handle even if pickling fails (the original
                # `pickle.dump(..., open(...))` leaked the file object).
                with open(sub_pipeline.path_data['full_fused_data'], 'wb') as f:
                    pickle.dump(full_fused_data, f)
            except Exception as e:
                # Was a bare `except: pass`, which silently swallowed every
                # failure (including KeyboardInterrupt). Log and move on so a
                # single broken session doesn't go unnoticed.
                print('generate_training_data failed for', participant_id, session_id, '-', e)
if __name__ == '__main__':
# Full offline pipeline: for every participant session, preprocess
# keypoints and cut the per-region video slices, then run the three
# optical-flow analyses once over everything, and finally render the
# annotated demo videos.
pprint(participant_data)
for participant_id in participant_data.keys():
for session_id in participant_data[participant_id].keys():
print(participant_id, session_id)
# Keypoint preprocessing (both smoothed and raw variants are needed
# downstream: processed_smooth_file vs processed_file).
processor = VideoProcessor('processing', participant_data[participant_id][session_id])
processor.preprocess_actor_keypoints(smooth=True)
processor.preprocess_actor_keypoints(smooth=False)
# Cut the region-of-interest video slices used by the optical-flow analysers.
main_pipeline = MainPipeline('test', participant_data[participant_id][session_id])
main_pipeline.generate_leg_slice()
main_pipeline.generate_hand_slice()
main_pipeline.generate_hand_cross_slice()
# Batch optical-flow analyses (run once for all sessions; the analysers
# take no per-session path data here).
instance_hand_cross_analyser = HandCrossAnalyser('test', None)
instance_hand_cross_analyser.analyse_hand_cross_optical_flow()
instance_leg_action_analyser = LegActionAnalyser('test', None)
instance_leg_action_analyser.analyse_leg_action_optical_flow()
# NOTE(review): hand-ACTION analysis is run through HandCrossAnalyser —
# presumably intentional because it exposes analyse_hand_action_optical_flow,
# but verify a dedicated HandActionAnalyser wasn't intended here.
instance_hand_action_analyser = HandCrossAnalyser('test', None)
instance_hand_action_analyser.analyse_hand_action_optical_flow()
# NOTE(review): main_pipeline is the instance from the LAST loop iteration;
# only that session's demo is rendered here — confirm this is intended.
main_pipeline.show_actor_demo(save_video=True)
| 45.162006
| 191
| 0.558132
| 11,060
| 93,666
| 4.386166
| 0.037884
| 0.035662
| 0.0141
| 0.015667
| 0.890973
| 0.862938
| 0.845643
| 0.823689
| 0.808518
| 0.786461
| 0
| 0.024098
| 0.338095
| 93,666
| 2,074
| 192
| 45.162006
| 0.758363
| 0.110969
| 0
| 0.753758
| 0
| 0
| 0.054426
| 0.012576
| 0
| 0
| 0.000097
| 0.000482
| 0.002863
| 1
| 0.013601
| false
| 0.000716
| 0.024338
| 0.003579
| 0.045812
| 0.083751
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2813e1128436e9fb6ef30e9a644477057aa372c
| 103,382
|
py
|
Python
|
alembic/versions/fe33bf7f5c52_create_ericsson_2g_default_tables_for_.py
|
bodastage/bts-database
|
96df7915621dd46daf55016eedf5cfc84dd0e3a2
|
[
"Apache-2.0"
] | 1
|
2019-08-30T01:20:14.000Z
|
2019-08-30T01:20:14.000Z
|
alembic/versions/fe33bf7f5c52_create_ericsson_2g_default_tables_for_.py
|
bodastage/bts-database
|
96df7915621dd46daf55016eedf5cfc84dd0e3a2
|
[
"Apache-2.0"
] | 1
|
2018-05-30T09:29:24.000Z
|
2018-05-30T10:04:37.000Z
|
alembic/versions/fe33bf7f5c52_create_ericsson_2g_default_tables_for_.py
|
bodastage/bts-database
|
96df7915621dd46daf55016eedf5cfc84dd0e3a2
|
[
"Apache-2.0"
] | 3
|
2018-03-10T23:29:30.000Z
|
2019-02-19T22:11:09.000Z
|
"""Create Ericsson 2G default tables for cnaiv2
Revision ID: fe33bf7f5c52
Revises: 34b2f24951a8
Create Date: 2018-02-07 03:35:07.119000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'fe33bf7f5c52'
down_revision = '34b2f24951a8'
branch_labels = None
depends_on = None
def upgrade():
op.create_table('BSC',
*[sa.Column('FileName', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('capabilities', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('varDateTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('subnetwork', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('domain', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('set', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AFRVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AHRVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AWBVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ALPHA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR3MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR3THR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR3HYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR3ICM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR4ICM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR3ICM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR4ICM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR4MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR4THR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSFR4HYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR3MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR3THR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR3HYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR4MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR4THR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSHR4HYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRFRSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRHRSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRSPEECHVERUSE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQMSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQMMINBUFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQMRTTCONST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQMMAXIPSIZE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQMMINIPSIZE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('REEST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('REPFACCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSWB1ICM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSWB1HYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRCSWB1THR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBDHA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBFB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBSPVERUSE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBDYMAPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AFLP_TIME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITADMCTRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITGMAXUSEUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITEMAXUSEUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITGMAXUSEDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITEMAXUSEDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITQOSPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHNORMAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSC_NAME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCAFLP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCAIRCT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCFSOFFSETLSW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCFSOFFSETMSW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCMC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSSRELEASE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CAP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CHCODING', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CNID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('COEXUMTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('COEXUMTSLSH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('COEXUMTSTINT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CONFMODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSEDAACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSNWMODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DCA_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DISPP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DLDELAY', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTCBSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTXFUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('REGINTDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('REGINTUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSLENDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSLENUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLENDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLENUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFRVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EMRSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EBANDINCLUDED', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITHIGHCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITSCHEDFREQH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITSCHEDFREQL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ESDELAY', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FRVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FBCHALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FASTRET3GNC2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ONDEMANDGPHDEV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_ASSOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_EVALTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_IBHOASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_IBHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_IHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_NHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_TAAVELEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_TALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_THO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_TINIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1800_TURGEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EGPRSIRUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EMERGPRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ENHAMRSIGN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EXTCELLNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EXTHANDOVERNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRS5TSDLACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSAVAINT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSNEUTRALACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_ASSOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_EVALTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_IBHOASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_IBHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_IHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_NHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_TAAVELEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_TALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_THO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_TINIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM900_TURGEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_ASSOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_EVALTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_IBHOASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_IBHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_IHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_NHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_TAAVELEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_TALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_THO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_TINIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM800_TURGEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('G_SYS_TYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HALFRATESUPP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSCHAVAILTIMER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND3', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND4', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND5', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND6', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND7', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBAND8', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR3', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR4', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR5', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR6', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR7', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSBANDTHR8', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSTRAFDISSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HIGHFERULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HIGHFERDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HOREQLACCI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HRVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HOMTD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXTGCL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MCIMALG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INTCELLNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INTHANDOVERNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOADOPT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOASNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOPTETHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOPTGTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCHIGHMCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCMODEUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCMODEDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCDEFAULTMCSDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCDEFAULTMCSUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCDEFMCSDLE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCDEFMCSULE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCHIGHMCSDLE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCHIGHMCSULE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LQCUNACK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LS_STATUS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXCELLSINLAYER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXCHDATARATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXDBDEVINLAYER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXNOCHGRP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXNOSDCCHTRX', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXPWRUTDOA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MCPABCCHOFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MBCRAC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MNCDIGITHAND', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MODHOTOHCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSC_NAME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSCPOOL_ID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSEITRESPTIME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSQHOPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSQUEUING', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NACCACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NOTIFP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ABISOPT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ADAPTCONF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ADMCTRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_REDPACLAT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_RLINKTIMERAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AUTOFLP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AUTOIRCT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_RPWRHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AMRHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AMRPWRCTRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AMRWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AMRWBMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ATHAABIS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_AUTOHFSEXPAND', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_TIGHTBCCHREUSE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_BCCHPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_BTSPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_BTSPWRCTRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_COMBCRESUMTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_COMBINEDCELL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_CAPCNTRLVAMOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_CRESLTE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DAMRREDUCE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DTM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DTMMSCLASS11', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DYMA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DYNHRALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DYNHRALLOCWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DYNOLULSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EFTA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EGPRS2ABPCLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EGPRSBPCLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EPOG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ENHAMRCOV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EXTNUMCELL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FEATSYNCHRNW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FF_CHAVSUALENH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FLEXABIS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FASTRETURN3G', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_G1GSMBAND', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRS5TSDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSAQM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ENHOSUCCRATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSNEUTRAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSCS3CS4', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSEDA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSEIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSLOADOPT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSPULS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_GPRSQOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HCSBAND', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HDRATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HPBOOST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HRC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_HSCSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_IM3G', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_INCRSDCCHCAP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_ISHOLSH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_IURG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MAIOMANAGEMENT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MAXNUMCELLS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MCPAPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MIP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MIPROUTE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MIXEDMODERADIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MSPWRCTRL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MULTIBANDCELL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_PMSQ', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_PFASTMSREG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_PKTCELLPLAN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VGCSPMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_PREEMPTION', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_RANDOMFILL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_QOSSTREAM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SBHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SEMIPDCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SPIDPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SUPCOEXUMTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SUPIRC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SQPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SQSSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_TCHOPT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_TFO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EGPRSIRU', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_XRANGEC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_XRANGESC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EMRSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VGCSDYNAMIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VGCSENCR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VGCSTALKID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE3', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE4', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE5', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE6', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE7', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE8', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE9', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE10', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_NC2PROFILE255', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MCNSUPPORT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_UTDOA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VAMOSADVANCED', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_VAMOSMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FF_USERDATA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OP_MODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FF_TDSCDMA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FF_CRESTDSCDMA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_WCDMAQUEUE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PART', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PAGPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PCUQOS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PILTIMERFLEX', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PILTIMER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NSEIRELATION', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PMRSUPP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSCELLPLAN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_ASSOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_EVALTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_IBHOASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_IBHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_IHOSICH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_NHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_TAAVELEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_TALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_THO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_TINIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GSM1900_TURGEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PCUEIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RAND', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SACCHDLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RNC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SACCHULTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SBHOACTIVE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPEECHVERUSED', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_SDCCH_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_SDCCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_SDCCHPL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_TCH_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_TCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_TCHPL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SMPC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TCHOPTIMIZATION', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TBFMODEACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HSCSDUPGTIMER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PHHSCSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PHSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BADQDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BADQUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOWSSDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LOWSSUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FASTASSIGN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXLOAD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TIMER3105', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NOOFPHYSINFOMSG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLMRKMSG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLSTIMEINTERVAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRACEMSGTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPEQINDCOLLECT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPIDTABLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TABLEID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPID_ACTION', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TALKID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TFOPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TEITPENDING', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TFILIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TBFDLLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TBFULLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TIMERT3TRC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRXOFFDELAY', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRXOFFTARGET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRXONTARGET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PULSCHEDINT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QASSTIME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOSMAPPING', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOSSTREAMPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOSCONVPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOSTHP1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOSTHP2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('THPMBRFACTOR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RNDFILL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ULDELAY', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('USFLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UTRANEXTCELLNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UTRANNRELNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UPDWNRATIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TSTREAMSTART', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TSTREAMPENDING', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UTDOAMRMODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VAMOSBSCSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VAMOSMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VAMOSMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CAPACITYLOCKS', sa.Text, autoincrement=False, nullable=True),
sa.Column('VERSION', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VGENCR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VGPRECEDE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VGPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_INTERBSCNACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_REDLAT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FF_MIXHOP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_DCDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_IMSIHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSCIMSIHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IMSIPATA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IMSIPATB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IMSIPATC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IMSIPATD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCCPERMA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCCPERMB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCCPERMC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCCPERMD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_CAPCNTRLSCC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EMERGENCYMODE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRFRMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRHRMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFRMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HRMAXTRAFFIC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRFRMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRFRMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRHRMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRHRMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRWBMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HRMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HRMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFRMAXTRAFFIC_CLAL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFRMAXTRAFFIC_CLTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_MULTICCCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PAGBUNDLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_SMSCBADVANCED', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SMSCBS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_PSDLPC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_FASTRETURNLTE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_LTEGSMNACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LTEGSMNACCSTATUS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_BTSSOFTSYNC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_EPU', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OF_APSULPC', sa.CHAR(length=250), autoincrement=False, nullable=True)],
schema='ericsson_cnaiv2'
)
# CHANNEL_GROUP: every column is a nullable CHAR(250) free-text field except
# 'varDateTime', which is a PostgreSQL TIMESTAMP. Column order is significant
# for the generated DDL and is preserved exactly.
op.create_table('CHANNEL_GROUP',
    *[
        sa.Column('varDateTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)
        if _col == 'varDateTime'
        else sa.Column(_col, sa.CHAR(length=250), autoincrement=False, nullable=True)
        for _col in (
            'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
            'set', 'USERDATA', 'BAND', 'BCCD', 'CBCH', 'CELL_NAME',
            'CHGR_NAME', 'BSC_NAME', 'DCHNO', 'EACPREF', 'EXCHGR', 'HOP',
            'HSN', 'MAIO', 'NUMREQBPC', 'NUMREQE2ABPC', 'NUMREQEGPRSBPC',
            'NUMREQCS3CS4BPC', 'ODPDCHLIMIT', 'SAS', 'SDCCH', 'SCTYPE',
            'STATE', 'TG_NAME', 'TN', 'TN7BCCH', 'TSC', 'HOPTYPE', 'ETCHTN',
            'CCCH', 'TNBCCH', 'BSPWRT',
        )
    ],
    schema='ericsson_cnaiv2'
)
# EXTERNAL_CELL: all columns are nullable CHAR(250) apart from 'varDateTime'
# (PostgreSQL TIMESTAMP). The tuple below preserves the original column order,
# which determines the order of columns in the generated DDL.
op.create_table('EXTERNAL_CELL',
    *[
        sa.Column('varDateTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)
        if _col == 'varDateTime'
        else sa.Column(_col, sa.CHAR(length=250), autoincrement=False, nullable=True)
        for _col in (
            'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
            'set', 'USERDATA', 'AW', 'BCC', 'BCCHNO', 'BSC_NAME', 'BSPWR',
            'BSRXMIN', 'BSRXSUFF', 'BSTXPWR', 'CELL_NAME', 'CI', 'C_SYS_TYPE',
            'DFI', 'FASTMSREG', 'EXTPEN', 'LAC', 'LAYER', 'LAYERHYST',
            'LAYERTHR', 'MCC', 'MISSNM', 'MNC', 'MSRXMIN', 'MSRXSUFF',
            'MSTXPWR', 'NCC', 'PHCSTHR', 'PLAYER', 'PSSTEMP', 'PTIMTEMP',
            'RAC', 'RIMNACC', 'SCHO',
        )
    ],
    schema='ericsson_cnaiv2'
)
# INNER_CELL: nullable CHAR(250) columns throughout, except 'varDateTime'
# (PostgreSQL TIMESTAMP). Original column ordering is preserved exactly.
op.create_table('INNER_CELL',
    *[
        sa.Column('varDateTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)
        if _col == 'varDateTime'
        else sa.Column(_col, sa.CHAR(length=250), autoincrement=False, nullable=True)
        for _col in (
            'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
            'set', 'USERDATA', 'CELL_NAME', 'BSC_NAME', 'CI', 'CO', 'EA',
            'EC', 'LAC', 'LOCNO', 'MCC', 'MNC', 'MSC_NAME', 'NCS', 'RME',
            'RO',
        )
    ],
    schema='ericsson_cnaiv2'
)
op.create_table('INTERNAL_CELL',
*[sa.Column('FileName', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('capabilities', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('varDateTime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('subnetwork', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('domain', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('set', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('USERDATA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ACCMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ACSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ACTIVE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AFLP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AIRCT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AGBLK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ALLOCPREF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AMRPCSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ANTENNA_GAIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ANTENNA_TILT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ANTENNA_TYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AQPSKONBCCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ASSV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ATHABISPR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ATT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('AW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHDTCB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHDTCBHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHLOSS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHLOSSHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHNO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHREUSE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BPDCHBR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSC_NAME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSPWR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSPWRB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSPWRMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSPWRT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BPCDEL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSRPWRHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSRPWROFFSET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSRXMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSRXSUFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BTSPSHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BTSPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BSTXPWR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CBCHD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CBQ', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CCHPWR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CELL_DIR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CELL_NAME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CELL_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CELL_TYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CELLQ', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CHAP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CHCSDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CHMAX', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLSACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLSLEVEL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLSRAMP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLS_STATUS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CLTHV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CODECREST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CMDR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CP_BCC_TSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CRH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CRO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CSPSALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('CSPSPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('C_SYS_TYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DAMRCRABISPR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DHA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DHASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DHASSTHRASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DHASSTHRHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DHPR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DISPLAY_TAG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTHAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTHNAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMPR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQBAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQBNAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQGAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMQGNAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMSUPP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMTFAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMTFNAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMTHAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DMTHNAMR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTMSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTXD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DTXU', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DYNBTSPWR_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DYNMSPWR_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DYNVGCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFTASTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('E2AFACTOR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('E2APDCHBR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('E2ALQC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('E2AINITMCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ECSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EFACTOR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EINITMCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EITEXCLUDED', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ENV_CHAR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EPDCHBR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FASTMSREG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FASTRET3G', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FBOFFS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FBVGCHALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDMRR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDQMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDQOFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDQMINOFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDRSCPMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDREPTHR2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FLEXHIGHGPRS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FERLEN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FNOFFSET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FPDCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FULLAQPSK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GAMMA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPDCHBR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GPRSSUP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HCSOUT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HEIGHT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HOCLSACC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HPBSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('HYSTSEP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IAN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ICMSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IDLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INTAVE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('IRC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ISHOLEV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LAC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LATITUDE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LAYER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LAYERHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LAYERTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LCOMPDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LCOMPUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LIMIT1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LIMIT2', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LIMIT3', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LIMIT4', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('LONGITUDE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXIHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXISHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXLAPDM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXRET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXSBLK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXSMSG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAXTA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAX_ALTITUDE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MAX_CELL_RADIUS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MBCR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MCC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MFRMS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MIN_ALTITUDE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MINREQTCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MCPAPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MCPAPSHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MISSNM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MNC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSRPWRHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSRPWROFFSET', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSRXMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSRXSUFF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MSTXPWR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCCPERM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCSTAT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCRPT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NCPROF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NECI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NUMTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NUMEGPRSTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OPTMSCLASS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OWNBCCHINACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OWNBCCHINIDLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PHCSTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PLAYER', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PLMNNAME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PRACHBLK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PRIMPLIM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSKONBCCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSSBQ', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSSHF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSSTA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PSSTEMP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PTIMBQ', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PTIMHF', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PTIMTA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PTIMTEMP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QBAHRV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QBAWBV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QBNAV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESDLOV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESULOV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QBAFRV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QCOMPDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QCOMPUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESDLAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESDLAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESULAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESULAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QDESULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QEVALSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QEVALSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLENGTH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLENSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLENSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMDLAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMULAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QLIMULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETDLAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETULAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QOFFSETULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QSCI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('QSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RTTI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('REPPERNCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RESLIMIT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RHYST', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKTAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKTAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKTAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKUP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKUPAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKUPAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RLINKUPAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('RTTIINITMCS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCLD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCLDLOL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCLDSC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SCLDLUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SDCCHUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SECTOR_ANGLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SIMSG1', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SIMSG7', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SIMSG8', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BCCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BCCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSTCHBPCACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSTCHBPCLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHBPCACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHBPCLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSTCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSTCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSSDCCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSSDCCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSTCHBPCACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_PSTCHBPCLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_CBCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_CBCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_SDCCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_SDCCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRFRACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRFRLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRHRACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRHRLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRWBACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHAMRWBLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHEFRACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHEFRLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHFRACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHFRLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHHRACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_TCHHRLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSTCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSTCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSSDCCHACL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLCA_BTSPSSDCCHLVA', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLEVEL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SLOW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPDCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESULAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESDLAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESDLAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESULAHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSEVALSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSEVALSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSLENSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSLENSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETDL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETDLAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETDLAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETULAFR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSOFFSETULAWB', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSRAMPSD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSRAMPSI', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSTHRASSV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSTHRV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_SDCCH_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SS_TCH_STATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('STIME', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('STREAMSUP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('T3212', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TALIM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TCHFRUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TCHHRUL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('THRAV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('THRVP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('THRDV', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TIHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TMAXIHO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TRAFBLK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TSCSET1PAIRS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TSS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TX', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TXTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UMFI_ACTIVE', sa.Text, autoincrement=False, nullable=True),
sa.Column('UMFI_IDLE', sa.Text, autoincrement=False, nullable=True),
sa.Column('EARFCNIDLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EHPRIOTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ELPRIOTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EQRXLEVMIN', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('ERATPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FDDARFCNIDLE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GHPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GLPRIOTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GMEASTHR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GRATPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('GTRES', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('MINCHBW', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PCID', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PCIDG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PCIDP', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PRIOCR', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('UHPRIOTHR', sa.Text, autoincrement=False, nullable=True),
sa.Column('ULPRIOTHR', sa.Text, autoincrement=False, nullable=True),
sa.Column('UQRXLEVMIN', sa.Text, autoincrement=False, nullable=True),
sa.Column('URATPRIO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VAMOSCELLSTATE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VGCHALLOC', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('VHOSUCCESS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('NUMINT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DCDLACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DRX', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SPERIOD', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FSLOTS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DLPCG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DLPCE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DLPCE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INITDLPCG', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INITDLPCE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('INITDLPCE2A', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TBFULLIM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('TBFDLLIM', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('FASTRETLTE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('COVERAGEE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHPS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('BCCHPSTYPE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PRECCCH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PRO', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('DISABLEPERIODS', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('COVERAGEU', sa.Text, autoincrement=False, nullable=True),
sa.Column('XRANGE', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('EPUACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PUTHRESHU', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('PUTHRESHL', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('OSRTHRESH', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('APSULPCACT', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDESGMSK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDES8PSK', sa.CHAR(length=250), autoincrement=False, nullable=True),
sa.Column('SSDES16QAM', sa.CHAR(length=250), autoincrement=False, nullable=True)],
schema='ericsson_cnaiv2'
)
# MSC: one row per MSC node from the CNAI export.  Every attribute is stored
# as raw CHAR(250) text except the export timestamp and the variable-length
# LAI list, which keep their own types.
_msc_type_overrides = {
    'varDateTime': postgresql.TIMESTAMP(),
    'LAI': sa.Text,
}
_msc_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'CAPLTCHEMER', 'CELLCONNO', 'CGIANTRNO', 'DN',
    'EXPANDEDMNC', 'LAI', 'LOCARNO', 'MSCG', 'MSCPOOL_ID', 'MSC_NAME',
    'NRIL', 'NRIV', 'OF_FF_E911ENH', 'OF_MSCPOOL', 'OF_MSCNF861',
    'VERSION', 'ISMANAGED', 'MESSAGE', 'MSCBC', 'CIPHERALGALLOW',
)
op.create_table(
    'MSC',
    *[
        sa.Column(
            _name,
            _msc_type_overrides.get(_name, sa.CHAR(length=250)),
            autoincrement=False,
            nullable=True,
        )
        for _name in _msc_column_names
    ],
    schema='ericsson_cnaiv2'
)
# NREL: GSM neighbour-relation parameters keyed by (BSC, cell, relation).
# varDateTime is the only non-text column; all other values are CHAR(250).
_nrel_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'BSC_NAME', 'CELL_NAME', 'NREL_NAME', 'AWOFFSET',
    'BQOFFSET', 'BQOFFSETAFR', 'BQOFFSETAWB', 'CAND', 'CS', 'GPRSVALID',
    'HIHYST', 'KHYST', 'KOFFSET', 'LHYST', 'LOHYST', 'LOFFSET',
    'PROFFSET', 'OFFSET', 'TRHYST', 'TROFFSET',
)
op.create_table(
    'NREL',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _nrel_column_names
    ],
    schema='ericsson_cnaiv2'
)
# OUTER_CELL: cells known to an MSC but served by a different MSC/MSC group.
# varDateTime is the only non-text column; all other values are CHAR(250).
_outer_cell_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'CELL_NAME', 'MSC_NAME', 'N_MSC', 'N_MSCG',
    'CI', 'LAC', 'MCC', 'MNC', 'NCS',
)
op.create_table(
    'OUTER_CELL',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _outer_cell_column_names
    ],
    schema='ericsson_cnaiv2'
)
# OVERLAID_CELL: overlaid-subcell parameters (power, quality, SLCA locks,
# handover thresholds) per BSC cell.  varDateTime is the only non-text
# column; every other attribute is stored as CHAR(250).
_overlaid_cell_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'BSC_NAME', 'BSPWRMIN', 'BSPWRT', 'BSTXPWR',
    'CELL_NAME', 'DTCB', 'DTCBHYST', 'IHO', 'LCOMPDL', 'LCOMPUL',
    'LOL', 'LOLHYST', 'MAXIHO', 'MSTXPWR', 'NDIST', 'NNCELLS', 'NUMINT',
    'QCOMPDL', 'QCOMPUL',
    'QDESDL', 'QDESDLAFR', 'QDESDLAWB',
    'QDESUL', 'QDESULAFR', 'QDESULAWB',
    'QLIMDL', 'QLIMDLAFR', 'QLIMDLAWB',
    'QLIMUL', 'QLIMULAFR', 'QLIMULAWB',
    'QOFFSETDL', 'QOFFSETDLAFR', 'QOFFSETDLAWB',
    'QOFFSETUL', 'QOFFSETULAFR', 'QOFFSETULAWB',
    'SDCCHOL',
    'SLCA_TCHAMRHRACL', 'SLCA_TCHAMRWBACL', 'SLCA_TCHAMRHRLVA',
    'SLCA_TCHAMRWBLVA', 'SLCA_BCCHACL', 'SLCA_BCCHLVA', 'SLCA_CBCHACL',
    'SLCA_CBCHLVA', 'SLCA_STATE', 'SLCA_SDCCHACL', 'SLCA_SDCCHLVA',
    'SLCA_TCHEFRACL', 'SLCA_TCHEFRLVA', 'SLCA_TCHFRACL', 'SLCA_TCHFRLVA',
    'SLCA_TCHHRACL', 'SLCA_TCHHRLVA', 'SLCA_TCHAMRFRACL',
    'SLCA_TCHAMRFRLVA', 'SLCA_TCHBPCACL', 'SLCA_TCHBPCLVA',
    'SSDESDL', 'SSDESDLAFR', 'SSDESUL', 'SSDESULAFR', 'SSDESULAWB',
    'SSOFFSETDL', 'SSOFFSETDLAFR', 'SSOFFSETDLAWB',
    'SSOFFSETUL', 'SSOFFSETULAFR', 'SSOFFSETULAWB',
    'SSDESDLAHR', 'SSDESDLAWB', 'SSDESULAHR', 'QDESDLAHR', 'QDESULAHR',
    'TAOL', 'TAOLHYST', 'TCHFROL', 'TCHHROL', 'TIHO', 'TMAXIHO',
)
op.create_table(
    'OVERLAID_CELL',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _overlaid_cell_column_names
    ],
    schema='ericsson_cnaiv2'
)
# PRIORITY_PROFILE: 16 priority levels per profile, each level carrying an
# INAC and a PROBF value (PRL1..PRL16).  varDateTime is the only non-text
# column; everything else is stored as CHAR(250).
_priority_profile_column_names = (
    ['FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
     'set', 'USERDATA', 'BSC_NAME']
    + ['PRL%d_INAC' % _level for _level in range(1, 17)]
    + ['PRL%d_PROBF' % _level for _level in range(1, 17)]
    + ['PRI_PROFILE_NAME']
)
op.create_table(
    'PRIORITY_PROFILE',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _priority_profile_column_names
    ],
    schema='ericsson_cnaiv2'
)
# SITE: physical site records (coordinates, altitude, free-text note) per
# BSC.  varDateTime is the only non-text column; the rest are CHAR(250).
_site_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'ALTITUDE', 'BSC_NAME', 'LATITUDE', 'LONGITUDE',
    'NOTE', 'SITE_NAME',
)
op.create_table(
    'SITE',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _site_column_names
    ],
    schema='ericsson_cnaiv2'
)
# TG: transceiver-group to site/BSC mapping.  varDateTime is the only
# non-text column; all other attributes are CHAR(250).
_tg_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'BSC_NAME', 'SITE_NAME', 'TG_NAME',
)
op.create_table(
    'TG',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _tg_column_names
    ],
    schema='ericsson_cnaiv2'
)
# UTRAN_EXTERNAL_CELL: 3G (UTRAN) cells defined as external neighbours in a
# BSC (frequency, scrambling code, RNC id, identity).  varDateTime is the
# only non-text column; the rest are CHAR(250).
_utran_ext_cell_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'BSC_NAME', 'CELL_NAME', 'CI', 'FDDARFCN', 'LAC',
    'MRSL', 'MCC', 'MNC', 'RNCID', 'SCRCODE', 'USEDFREQTHRESH2DECNO',
)
op.create_table(
    'UTRAN_EXTERNAL_CELL',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _utran_ext_cell_column_names
    ],
    schema='ericsson_cnaiv2'
)
# UTRAN_NREL: GSM-to-UTRAN neighbour relations, keyed by (BSC, cell,
# relation).  varDateTime is the only non-text column; the rest are
# CHAR(250).
_utran_nrel_column_names = (
    'FileName', 'capabilities', 'varDateTime', 'subnetwork', 'domain',
    'set', 'USERDATA', 'BSC_NAME', 'CELL_NAME', 'NREL_NAME',
)
op.create_table(
    'UTRAN_NREL',
    *[
        sa.Column(
            _name,
            postgresql.TIMESTAMP() if _name == 'varDateTime'
            else sa.CHAR(length=250),
            autoincrement=False,
            nullable=True,
        )
        for _name in _utran_nrel_column_names
    ],
    schema='ericsson_cnaiv2'
)
def downgrade():
    """Revert the migration by dropping every ``ericsson_cnaiv2`` table.

    Tables are dropped in the reverse order of their creation in the
    corresponding upgrade step.
    """
    for table_name in (
        'UTRAN_NREL',
        'UTRAN_EXTERNAL_CELL',
        'TG',
        'SITE',
        'PRIORITY_PROFILE',
        'OVERLAID_CELL',
        'OUTER_CELL',
        'NREL',
        'MSC',
        'INTERNAL_CELL',
        'INNER_CELL',
        'EXTERNAL_CELL',
        'CHANNEL_GROUP',
        'BSC',
    ):
        op.drop_table(table_name, schema='ericsson_cnaiv2')
| 81.467297
| 96
| 0.729411
| 13,677
| 103,382
| 5.481319
| 0.070776
| 0.126988
| 0.412709
| 0.476203
| 0.913737
| 0.913737
| 0.911669
| 0.908761
| 0.903425
| 0.903425
| 0
| 0.041717
| 0.095491
| 103,382
| 1,268
| 97
| 81.531546
| 0.759994
| 0.001673
| 0
| 0.284462
| 0
| 0
| 0.111724
| 0.000814
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001594
| false
| 0
| 0.00239
| 0
| 0.003984
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b28531cf649d2aff4e9fe096a0d19ab4cd82f388
| 2,191
|
py
|
Python
|
appengine/src/greenday_api/videocomment/containers.py
|
meedan/montage
|
4da0116931edc9af91f226876330645837dc9bcc
|
[
"Apache-2.0"
] | 6
|
2018-07-31T16:48:07.000Z
|
2020-02-01T03:17:51.000Z
|
appengine/src/greenday_api/videocomment/containers.py
|
meedan/montage
|
4da0116931edc9af91f226876330645837dc9bcc
|
[
"Apache-2.0"
] | 41
|
2018-08-07T16:43:07.000Z
|
2020-06-05T18:54:50.000Z
|
appengine/src/greenday_api/videocomment/containers.py
|
meedan/montage
|
4da0116931edc9af91f226876330645837dc9bcc
|
[
"Apache-2.0"
] | 1
|
2018-08-07T16:40:18.000Z
|
2018-08-07T16:40:18.000Z
|
"""
Request data containers for video comment API
"""
import endpoints
from protorpc import messages as api_messages
from .messages import (
VideoCommentRequestMessage, VideoCommentReplyMessage
)
# ResourceContainers combine a request-body message with URL path/query
# parameters for the video comment endpoints.
# NOTE(review): field numbering starts at 2 — presumably 1 is taken by the
# body message; confirm against the endpoints ResourceContainer docs.

# Body (comment text) + project/video identifiers for creating a root comment.
CreateVideoRootCommentContainer = endpoints.ResourceContainer(
VideoCommentRequestMessage,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3)
)
# Same as above plus the id of the existing comment being updated.
UpdateVideoCommentContainer = endpoints.ResourceContainer(
VideoCommentRequestMessage,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3),
comment_id=api_messages.IntegerField(
4, variant=api_messages.Variant.INT32)
)
# Parameters only (empty body message) — addresses a single comment entity.
VideoCommentEntityContainer = endpoints.ResourceContainer(
api_messages.Message,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3),
comment_id=api_messages.IntegerField(
4, variant=api_messages.Variant.INT32)
)
# Reply body + ids locating the parent comment to reply to.
CreateVideoCommentReplyContainer = endpoints.ResourceContainer(
VideoCommentReplyMessage,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3),
comment_id=api_messages.IntegerField(
4, variant=api_messages.Variant.INT32)
)
# Reply body + ids locating both the parent comment and the reply to update.
UpdateVideoCommentReplyContainer = endpoints.ResourceContainer(
VideoCommentReplyMessage,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3),
comment_id=api_messages.IntegerField(
4, variant=api_messages.Variant.INT32),
reply_id=api_messages.IntegerField(
5, variant=api_messages.Variant.INT32)
)
# Parameters only (empty body message) — addresses a single reply entity.
VideoCommentReplyContainer = endpoints.ResourceContainer(
api_messages.Message,
project_id=api_messages.IntegerField(
2, variant=api_messages.Variant.INT32),
youtube_id=api_messages.StringField(3),
comment_id=api_messages.IntegerField(
4, variant=api_messages.Variant.INT32),
reply_id=api_messages.IntegerField(
5, variant=api_messages.Variant.INT32)
)
| 32.701493
| 63
| 0.770881
| 228
| 2,191
| 7.171053
| 0.153509
| 0.235474
| 0.15107
| 0.198777
| 0.794495
| 0.794495
| 0.794495
| 0.794495
| 0.794495
| 0.794495
| 0
| 0.024038
| 0.145596
| 2,191
| 66
| 64
| 33.19697
| 0.849359
| 0.020539
| 0
| 0.672727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.054545
| 0
| 0.054545
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2ada88a8fdd9b0b7caf243e814569b7026c4d48
| 42,334
|
py
|
Python
|
sdk/python/pulumi_azure/network/firewall_policy.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/network/firewall_policy.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/network/firewall_policy.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['FirewallPolicyArgs', 'FirewallPolicy']
# NOTE(review): tfgen-generated code (see the file header) — do not hand-edit;
# any change here will be lost on the next regeneration. Comments only.
# Input-argument bag consumed by the FirewallPolicy resource constructor;
# each field is stored via pulumi.set and exposed through a generated
# getter/setter property pair below.
@pulumi.input_type
class FirewallPolicyArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
base_policy_id: Optional[pulumi.Input[str]] = None,
dns: Optional[pulumi.Input['FirewallPolicyDnsArgs']] = None,
identity: Optional[pulumi.Input['FirewallPolicyIdentityArgs']] = None,
intrusion_detection: Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intelligence_allowlist: Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']] = None,
threat_intelligence_mode: Optional[pulumi.Input[str]] = None,
tls_certificate: Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']] = None):
"""
The set of arguments for constructing a FirewallPolicy resource.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[str] base_policy_id: The ID of the base Firewall Policy.
:param pulumi.Input['FirewallPolicyDnsArgs'] dns: A `dns` block as defined below.
:param pulumi.Input['FirewallPolicyIdentityArgs'] identity: An `identity` block as defined below. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input['FirewallPolicyIntrusionDetectionArgs'] intrusion_detection: A `intrusion_detection` block as defined below.
:param pulumi.Input[str] location: The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[str] name: The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of private IP ranges to which traffic will not be SNAT.
:param pulumi.Input[str] sku: The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Firewall Policy.
:param pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs'] threat_intelligence_allowlist: A `threat_intelligence_allowlist` block as defined below.
:param pulumi.Input[str] threat_intelligence_mode: The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`.
:param pulumi.Input['FirewallPolicyTlsCertificateArgs'] tls_certificate: A `tls_certificate` block as defined below.
"""
# Only the required argument is set unconditionally; each optional
# argument is stored only when supplied, so "unset" stays distinguishable.
pulumi.set(__self__, "resource_group_name", resource_group_name)
if base_policy_id is not None:
pulumi.set(__self__, "base_policy_id", base_policy_id)
if dns is not None:
pulumi.set(__self__, "dns", dns)
if identity is not None:
pulumi.set(__self__, "identity", identity)
if intrusion_detection is not None:
pulumi.set(__self__, "intrusion_detection", intrusion_detection)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_ranges is not None:
pulumi.set(__self__, "private_ip_ranges", private_ip_ranges)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if threat_intelligence_allowlist is not None:
pulumi.set(__self__, "threat_intelligence_allowlist", threat_intelligence_allowlist)
if threat_intelligence_mode is not None:
pulumi.set(__self__, "threat_intelligence_mode", threat_intelligence_mode)
if tls_certificate is not None:
pulumi.set(__self__, "tls_certificate", tls_certificate)
# --- generated property accessors: each pair delegates to
# --- pulumi.get/pulumi.set under the snake_case key; the getter's
# --- name= argument carries the camelCase wire name where they differ.
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="basePolicyId")
def base_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the base Firewall Policy.
"""
return pulumi.get(self, "base_policy_id")
@base_policy_id.setter
def base_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "base_policy_id", value)
@property
@pulumi.getter
def dns(self) -> Optional[pulumi.Input['FirewallPolicyDnsArgs']]:
"""
A `dns` block as defined below.
"""
return pulumi.get(self, "dns")
@dns.setter
def dns(self, value: Optional[pulumi.Input['FirewallPolicyDnsArgs']]):
pulumi.set(self, "dns", value)
@property
@pulumi.getter
def identity(self) -> Optional[pulumi.Input['FirewallPolicyIdentityArgs']]:
"""
An `identity` block as defined below. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "identity")
@identity.setter
def identity(self, value: Optional[pulumi.Input['FirewallPolicyIdentityArgs']]):
pulumi.set(self, "identity", value)
@property
@pulumi.getter(name="intrusionDetection")
def intrusion_detection(self) -> Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']]:
"""
A `intrusion_detection` block as defined below.
"""
return pulumi.get(self, "intrusion_detection")
@intrusion_detection.setter
def intrusion_detection(self, value: Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']]):
pulumi.set(self, "intrusion_detection", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateIpRanges")
def private_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of private IP ranges to which traffic will not be SNAT.
"""
return pulumi.get(self, "private_ip_ranges")
@private_ip_ranges.setter
def private_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "private_ip_ranges", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input[str]]:
"""
The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to the Firewall Policy.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="threatIntelligenceAllowlist")
def threat_intelligence_allowlist(self) -> Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']]:
"""
A `threat_intelligence_allowlist` block as defined below.
"""
return pulumi.get(self, "threat_intelligence_allowlist")
@threat_intelligence_allowlist.setter
def threat_intelligence_allowlist(self, value: Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']]):
pulumi.set(self, "threat_intelligence_allowlist", value)
@property
@pulumi.getter(name="threatIntelligenceMode")
def threat_intelligence_mode(self) -> Optional[pulumi.Input[str]]:
"""
The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`.
"""
return pulumi.get(self, "threat_intelligence_mode")
@threat_intelligence_mode.setter
def threat_intelligence_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "threat_intelligence_mode", value)
@property
@pulumi.getter(name="tlsCertificate")
def tls_certificate(self) -> Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']]:
"""
A `tls_certificate` block as defined below.
"""
return pulumi.get(self, "tls_certificate")
@tls_certificate.setter
def tls_certificate(self, value: Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']]):
pulumi.set(self, "tls_certificate", value)
# NOTE(review): tfgen-generated code (see the file header) — do not hand-edit;
# any change here will be lost on the next regeneration. Comments only.
# State bag used when looking up / filtering existing FirewallPolicy
# resources; a superset of FirewallPolicyArgs that adds output-only fields
# (child_policies, firewalls, rule_collection_groups), and every field —
# including resource_group_name — is optional here.
@pulumi.input_type
class _FirewallPolicyState:
def __init__(__self__, *,
base_policy_id: Optional[pulumi.Input[str]] = None,
child_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dns: Optional[pulumi.Input['FirewallPolicyDnsArgs']] = None,
firewalls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
identity: Optional[pulumi.Input['FirewallPolicyIdentityArgs']] = None,
intrusion_detection: Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
rule_collection_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intelligence_allowlist: Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']] = None,
threat_intelligence_mode: Optional[pulumi.Input[str]] = None,
tls_certificate: Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']] = None):
"""
Input properties used for looking up and filtering FirewallPolicy resources.
:param pulumi.Input[str] base_policy_id: The ID of the base Firewall Policy.
:param pulumi.Input[Sequence[pulumi.Input[str]]] child_policies: A list of reference to child Firewall Policies of this Firewall Policy.
:param pulumi.Input['FirewallPolicyDnsArgs'] dns: A `dns` block as defined below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] firewalls: A list of references to Azure Firewalls that this Firewall Policy is associated with.
:param pulumi.Input['FirewallPolicyIdentityArgs'] identity: An `identity` block as defined below. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input['FirewallPolicyIntrusionDetectionArgs'] intrusion_detection: A `intrusion_detection` block as defined below.
:param pulumi.Input[str] location: The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[str] name: The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of private IP ranges to which traffic will not be SNAT.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] rule_collection_groups: A list of references to Firewall Policy Rule Collection Groups that belongs to this Firewall Policy.
:param pulumi.Input[str] sku: The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Firewall Policy.
:param pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs'] threat_intelligence_allowlist: A `threat_intelligence_allowlist` block as defined below.
:param pulumi.Input[str] threat_intelligence_mode: The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`.
:param pulumi.Input['FirewallPolicyTlsCertificateArgs'] tls_certificate: A `tls_certificate` block as defined below.
"""
# Every field is optional; only supplied values are stored so that
# "unset" remains distinguishable from an explicit value.
if base_policy_id is not None:
pulumi.set(__self__, "base_policy_id", base_policy_id)
if child_policies is not None:
pulumi.set(__self__, "child_policies", child_policies)
if dns is not None:
pulumi.set(__self__, "dns", dns)
if firewalls is not None:
pulumi.set(__self__, "firewalls", firewalls)
if identity is not None:
pulumi.set(__self__, "identity", identity)
if intrusion_detection is not None:
pulumi.set(__self__, "intrusion_detection", intrusion_detection)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_ranges is not None:
pulumi.set(__self__, "private_ip_ranges", private_ip_ranges)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if rule_collection_groups is not None:
pulumi.set(__self__, "rule_collection_groups", rule_collection_groups)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if threat_intelligence_allowlist is not None:
pulumi.set(__self__, "threat_intelligence_allowlist", threat_intelligence_allowlist)
if threat_intelligence_mode is not None:
pulumi.set(__self__, "threat_intelligence_mode", threat_intelligence_mode)
if tls_certificate is not None:
pulumi.set(__self__, "tls_certificate", tls_certificate)
# --- generated property accessors: each pair delegates to
# --- pulumi.get/pulumi.set under the snake_case key; the getter's
# --- name= argument carries the camelCase wire name where they differ.
@property
@pulumi.getter(name="basePolicyId")
def base_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the base Firewall Policy.
"""
return pulumi.get(self, "base_policy_id")
@base_policy_id.setter
def base_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "base_policy_id", value)
@property
@pulumi.getter(name="childPolicies")
def child_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of reference to child Firewall Policies of this Firewall Policy.
"""
return pulumi.get(self, "child_policies")
@child_policies.setter
def child_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "child_policies", value)
@property
@pulumi.getter
def dns(self) -> Optional[pulumi.Input['FirewallPolicyDnsArgs']]:
"""
A `dns` block as defined below.
"""
return pulumi.get(self, "dns")
@dns.setter
def dns(self, value: Optional[pulumi.Input['FirewallPolicyDnsArgs']]):
pulumi.set(self, "dns", value)
@property
@pulumi.getter
def firewalls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of references to Azure Firewalls that this Firewall Policy is associated with.
"""
return pulumi.get(self, "firewalls")
@firewalls.setter
def firewalls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "firewalls", value)
@property
@pulumi.getter
def identity(self) -> Optional[pulumi.Input['FirewallPolicyIdentityArgs']]:
"""
An `identity` block as defined below. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "identity")
@identity.setter
def identity(self, value: Optional[pulumi.Input['FirewallPolicyIdentityArgs']]):
pulumi.set(self, "identity", value)
@property
@pulumi.getter(name="intrusionDetection")
def intrusion_detection(self) -> Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']]:
"""
A `intrusion_detection` block as defined below.
"""
return pulumi.get(self, "intrusion_detection")
@intrusion_detection.setter
def intrusion_detection(self, value: Optional[pulumi.Input['FirewallPolicyIntrusionDetectionArgs']]):
pulumi.set(self, "intrusion_detection", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateIpRanges")
def private_ip_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of private IP ranges to which traffic will not be SNAT.
"""
return pulumi.get(self, "private_ip_ranges")
@private_ip_ranges.setter
def private_ip_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "private_ip_ranges", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="ruleCollectionGroups")
def rule_collection_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of references to Firewall Policy Rule Collection Groups that belongs to this Firewall Policy.
"""
return pulumi.get(self, "rule_collection_groups")
@rule_collection_groups.setter
def rule_collection_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "rule_collection_groups", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input[str]]:
"""
The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to the Firewall Policy.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="threatIntelligenceAllowlist")
def threat_intelligence_allowlist(self) -> Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']]:
"""
A `threat_intelligence_allowlist` block as defined below.
"""
return pulumi.get(self, "threat_intelligence_allowlist")
@threat_intelligence_allowlist.setter
def threat_intelligence_allowlist(self, value: Optional[pulumi.Input['FirewallPolicyThreatIntelligenceAllowlistArgs']]):
pulumi.set(self, "threat_intelligence_allowlist", value)
@property
@pulumi.getter(name="threatIntelligenceMode")
def threat_intelligence_mode(self) -> Optional[pulumi.Input[str]]:
"""
The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`.
"""
return pulumi.get(self, "threat_intelligence_mode")
@threat_intelligence_mode.setter
def threat_intelligence_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "threat_intelligence_mode", value)
@property
@pulumi.getter(name="tlsCertificate")
def tls_certificate(self) -> Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']]:
"""
A `tls_certificate` block as defined below.
"""
return pulumi.get(self, "tls_certificate")
@tls_certificate.setter
def tls_certificate(self, value: Optional[pulumi.Input['FirewallPolicyTlsCertificateArgs']]):
pulumi.set(self, "tls_certificate", value)
class FirewallPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
base_policy_id: Optional[pulumi.Input[str]] = None,
dns: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyDnsArgs']]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIdentityArgs']]] = None,
intrusion_detection: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIntrusionDetectionArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intelligence_allowlist: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyThreatIntelligenceAllowlistArgs']]] = None,
threat_intelligence_mode: Optional[pulumi.Input[str]] = None,
tls_certificate: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyTlsCertificateArgs']]] = None,
__props__=None):
"""
Manages a Firewall Policy.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example = azure.network.FirewallPolicy("example",
location="West Europe",
resource_group_name="example")
```
## Import
Firewall Policies can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/firewallPolicy:FirewallPolicy example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/firewallPolicies/policy1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] base_policy_id: The ID of the base Firewall Policy.
:param pulumi.Input[pulumi.InputType['FirewallPolicyDnsArgs']] dns: A `dns` block as defined below.
:param pulumi.Input[pulumi.InputType['FirewallPolicyIdentityArgs']] identity: An `identity` block as defined below. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[pulumi.InputType['FirewallPolicyIntrusionDetectionArgs']] intrusion_detection: A `intrusion_detection` block as defined below.
:param pulumi.Input[str] location: The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[str] name: The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] private_ip_ranges: A list of private IP ranges to which traffic will not be SNAT.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[str] sku: The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to the Firewall Policy.
:param pulumi.Input[pulumi.InputType['FirewallPolicyThreatIntelligenceAllowlistArgs']] threat_intelligence_allowlist: A `threat_intelligence_allowlist` block as defined below.
:param pulumi.Input[str] threat_intelligence_mode: The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`.
:param pulumi.Input[pulumi.InputType['FirewallPolicyTlsCertificateArgs']] tls_certificate: A `tls_certificate` block as defined below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: FirewallPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Firewall Policy.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example = azure.network.FirewallPolicy("example",
location="West Europe",
resource_group_name="example")
```
## Import
Firewall Policies can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/firewallPolicy:FirewallPolicy example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/firewallPolicies/policy1
```
:param str resource_name: The name of the resource.
:param FirewallPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(FirewallPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
base_policy_id: Optional[pulumi.Input[str]] = None,
dns: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyDnsArgs']]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIdentityArgs']]] = None,
intrusion_detection: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIntrusionDetectionArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
threat_intelligence_allowlist: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyThreatIntelligenceAllowlistArgs']]] = None,
threat_intelligence_mode: Optional[pulumi.Input[str]] = None,
tls_certificate: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyTlsCertificateArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = FirewallPolicyArgs.__new__(FirewallPolicyArgs)
__props__.__dict__["base_policy_id"] = base_policy_id
__props__.__dict__["dns"] = dns
__props__.__dict__["identity"] = identity
__props__.__dict__["intrusion_detection"] = intrusion_detection
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["private_ip_ranges"] = private_ip_ranges
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
__props__.__dict__["threat_intelligence_allowlist"] = threat_intelligence_allowlist
__props__.__dict__["threat_intelligence_mode"] = threat_intelligence_mode
__props__.__dict__["tls_certificate"] = tls_certificate
__props__.__dict__["child_policies"] = None
__props__.__dict__["firewalls"] = None
__props__.__dict__["rule_collection_groups"] = None
super(FirewallPolicy, __self__).__init__(
'azure:network/firewallPolicy:FirewallPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        base_policy_id: Optional[pulumi.Input[str]] = None,
        child_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        dns: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyDnsArgs']]] = None,
        firewalls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        identity: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIdentityArgs']]] = None,
        intrusion_detection: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyIntrusionDetectionArgs']]] = None,
        location: Optional[pulumi.Input[str]] = None,
        name: Optional[pulumi.Input[str]] = None,
        private_ip_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        resource_group_name: Optional[pulumi.Input[str]] = None,
        rule_collection_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        sku: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        threat_intelligence_allowlist: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyThreatIntelligenceAllowlistArgs']]] = None,
        threat_intelligence_mode: Optional[pulumi.Input[str]] = None,
        tls_certificate: Optional[pulumi.Input[pulumi.InputType['FirewallPolicyTlsCertificateArgs']]] = None) -> 'FirewallPolicy':
    """
    Get an existing FirewallPolicy resource's state with the given name, id, and
    optional extra properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to look up.
    :param pulumi.ResourceOptions opts: Options for the resource.

    Every remaining keyword argument seeds the state field of the same name:
    base_policy_id (ID of the base Firewall Policy), child_policies,
    dns (`dns` block), firewalls, identity (`identity` block),
    intrusion_detection (`intrusion_detection` block), location, name,
    private_ip_ranges (IP ranges excluded from SNAT), resource_group_name,
    rule_collection_groups, sku (`Standard` or `Premium`), tags,
    threat_intelligence_allowlist (`threat_intelligence_allowlist` block),
    threat_intelligence_mode (`Alert`, `Deny` or `Off`; defaults to `Alert`)
    and tls_certificate (`tls_certificate` block).
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state = _FirewallPolicyState.__new__(_FirewallPolicyState)
    # Populate the state object in bulk rather than one attribute at a time.
    state.__dict__.update({
        "base_policy_id": base_policy_id,
        "child_policies": child_policies,
        "dns": dns,
        "firewalls": firewalls,
        "identity": identity,
        "intrusion_detection": intrusion_detection,
        "location": location,
        "name": name,
        "private_ip_ranges": private_ip_ranges,
        "resource_group_name": resource_group_name,
        "rule_collection_groups": rule_collection_groups,
        "sku": sku,
        "tags": tags,
        "threat_intelligence_allowlist": threat_intelligence_allowlist,
        "threat_intelligence_mode": threat_intelligence_mode,
        "tls_certificate": tls_certificate,
    })
    return FirewallPolicy(resource_name, opts=opts, __props__=state)
@property
@pulumi.getter(name="basePolicyId")
def base_policy_id(self) -> pulumi.Output[Optional[str]]:
    """The ID of the base Firewall Policy."""
    return pulumi.get(self, "base_policy_id")
@property
@pulumi.getter(name="childPolicies")
def child_policies(self) -> pulumi.Output[Sequence[str]]:
    """A list of reference to child Firewall Policies of this Firewall Policy."""
    return pulumi.get(self, "child_policies")
@property
@pulumi.getter
def dns(self) -> pulumi.Output[Optional['outputs.FirewallPolicyDns']]:
    """A `dns` block as defined below."""
    return pulumi.get(self, "dns")
@property
@pulumi.getter
def firewalls(self) -> pulumi.Output[Sequence[str]]:
    """A list of references to Azure Firewalls that this Firewall Policy is associated with."""
    return pulumi.get(self, "firewalls")
@property
@pulumi.getter
def identity(self) -> pulumi.Output[Optional['outputs.FirewallPolicyIdentity']]:
    """An `identity` block as defined below. Changing this forces a new Firewall Policy to be created."""
    return pulumi.get(self, "identity")
@property
@pulumi.getter(name="intrusionDetection")
def intrusion_detection(self) -> pulumi.Output[Optional['outputs.FirewallPolicyIntrusionDetection']]:
    """A `intrusion_detection` block as defined below."""
    return pulumi.get(self, "intrusion_detection")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
    """The Azure Region where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created."""
    return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """The name which should be used for this Firewall Policy. Changing this forces a new Firewall Policy to be created."""
    return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateIpRanges")
def private_ip_ranges(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """A list of private IP ranges to which traffic will not be SNAT."""
    return pulumi.get(self, "private_ip_ranges")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
    """The name of the Resource Group where the Firewall Policy should exist. Changing this forces a new Firewall Policy to be created."""
    return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="ruleCollectionGroups")
def rule_collection_groups(self) -> pulumi.Output[Sequence[str]]:
    """A list of references to Firewall Policy Rule Collection Groups that belongs to this Firewall Policy."""
    return pulumi.get(self, "rule_collection_groups")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[str]:
    """The SKU Tier of the Firewall Policy. Possible values are `Standard`, `Premium`. Changing this forces a new Firewall Policy to be created."""
    return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """A mapping of tags which should be assigned to the Firewall Policy."""
    return pulumi.get(self, "tags")
@property
@pulumi.getter(name="threatIntelligenceAllowlist")
def threat_intelligence_allowlist(self) -> pulumi.Output[Optional['outputs.FirewallPolicyThreatIntelligenceAllowlist']]:
    """A `threat_intelligence_allowlist` block as defined below."""
    return pulumi.get(self, "threat_intelligence_allowlist")
@property
@pulumi.getter(name="threatIntelligenceMode")
def threat_intelligence_mode(self) -> pulumi.Output[Optional[str]]:
    """The operation mode for Threat Intelligence. Possible values are `Alert`, `Deny` and `Off`. Defaults to `Alert`."""
    return pulumi.get(self, "threat_intelligence_mode")
@property
@pulumi.getter(name="tlsCertificate")
def tls_certificate(self) -> pulumi.Output[Optional['outputs.FirewallPolicyTlsCertificate']]:
    """A `tls_certificate` block as defined below."""
    return pulumi.get(self, "tls_certificate")
| 50.578256
| 205
| 0.680351
| 4,834
| 42,334
| 5.756103
| 0.046752
| 0.092902
| 0.086038
| 0.040323
| 0.923235
| 0.907601
| 0.892111
| 0.885786
| 0.882947
| 0.875184
| 0
| 0.002086
| 0.21876
| 42,334
| 836
| 206
| 50.638756
| 0.839234
| 0.327515
| 0
| 0.815574
| 1
| 0
| 0.15532
| 0.091829
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165984
| false
| 0.002049
| 0.014344
| 0
| 0.280738
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a27d98c43302ada34f88315457728ed4d06fc256
| 2,061
|
py
|
Python
|
config/dev.py
|
a1136395507/Blog
|
e890dbe24bd2c3a82dad55e90f717db59a3e51a1
|
[
"Unlicense"
] | null | null | null |
config/dev.py
|
a1136395507/Blog
|
e890dbe24bd2c3a82dad55e90f717db59a3e51a1
|
[
"Unlicense"
] | null | null | null |
config/dev.py
|
a1136395507/Blog
|
e890dbe24bd2c3a82dad55e90f717db59a3e51a1
|
[
"Unlicense"
] | null | null | null |
import pymysql
from dbutils.pooled_db import PooledDB
class DBINFO_MYSQL:
    # Placeholder for MySQL connection settings; no fields defined yet.
    pass
class DBINFO_REDIS:
    # Placeholder for Redis connection settings; no fields defined yet.
    pass
class DB_USER_INFO_MYSQL:
    """Connection pool for the user-info database (sm_webapp_sl)."""
    # NOTE(review): credentials are hard-coded in source; move them to
    # environment variables or a secrets store before deploying.
    PYMYSQL_POOL = PooledDB(
        creator=pymysql,    # DB-API module used to open connections
        maxconnections=15,  # max connections the pool allows (0/None = unlimited)
        mincached=0,        # idle connections created at startup (0 = none)
        maxcached=6,        # max idle connections kept in the pool (0/None = unlimited)
        maxshared=0,        # max shared connections; ineffective for pymysql (threadsafety == 1)
        blocking=True,      # block and wait when the pool is exhausted instead of raising
        maxusage=None,      # max reuses per connection (None = unlimited)
        setsession=[],      # SQL run at session start, e.g. ["set time zone ..."]
        ping=1,             # 1 = ping the server whenever a connection is requested
        # Bug fix: was '1277.0.0.1', which is not a valid IPv4 address
        # (octet 1277 > 255); the intended loopback host is 127.0.0.1.
        host='127.0.0.1',
        port=3306,
        user='sm_insight',
        password='ZwLcINsightk3AALPkLGE',
        database='sm_webapp_sl',
        charset='utf8'
    )
class DB_SM_PRODUCT_MYSQL:
    """Connection pool for the sm_product database."""
    PYMYSQL_POOL = PooledDB(
        # -- server coordinates -------------------------------------------
        host='172.16.6.247',
        port=3306,
        user='sm_insight',
        password='ZwLcINsightk3AALPkLGE',
        database='sm_product',
        charset='utf8',
        # -- pool behaviour -----------------------------------------------
        creator=pymysql,    # DB-API module used to open connections
        maxconnections=15,  # upper bound on open connections (0/None = unlimited)
        mincached=0,        # idle connections pre-created at startup
        maxcached=6,        # most idle connections kept around (0/None = unlimited)
        maxshared=0,        # shared connections; no effect with pymysql (threadsafety == 1)
        blocking=True,      # wait for a free connection instead of raising
        maxusage=None,      # times one connection may be reused (None = unlimited)
        setsession=[],      # SQL run at session start, e.g. ["set time zone ..."]
        ping=1,             # ping the server whenever a connection is requested
    )
| 37.472727
| 165
| 0.655992
| 223
| 2,061
| 5.982063
| 0.434978
| 0.014993
| 0.023988
| 0.035982
| 0.85907
| 0.85907
| 0.85907
| 0.85907
| 0.85907
| 0.769115
| 0
| 0.039566
| 0.239689
| 2,061
| 54
| 166
| 38.166667
| 0.811742
| 0.477923
| 0
| 0.714286
| 0
| 0
| 0.108262
| 0.039886
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.095238
| 0.047619
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
a28f28fc7f3ca3117e6ebb0eb36ad19e955cb7e0
| 1,308
|
py
|
Python
|
equal_sides_of_an_array/test_equal_sides_of_an_array.py
|
guilhermegouw/codewars
|
8ad98bb661c9b47b9d9683df07cd9672ff80731c
|
[
"MIT"
] | null | null | null |
equal_sides_of_an_array/test_equal_sides_of_an_array.py
|
guilhermegouw/codewars
|
8ad98bb661c9b47b9d9683df07cd9672ff80731c
|
[
"MIT"
] | null | null | null |
equal_sides_of_an_array/test_equal_sides_of_an_array.py
|
guilhermegouw/codewars
|
8ad98bb661c9b47b9d9683df07cd9672ff80731c
|
[
"MIT"
] | null | null | null |
"""
test.assert_equals(find_even_index([1,2,3,4,3,2,1]),3)
test.assert_equals(find_even_index([1,100,50,-51,1,1]),1,)
test.assert_equals(find_even_index([1,2,3,4,5,6]),-1)
test.assert_equals(find_even_index([20,10,30,10,10,15,35]),3)
test.assert_equals(find_even_index([20,10,-80,10,10,15,35]),0)
test.assert_equals(find_even_index([10,-80,10,10,15,35,20]),6)
test.assert_equals(find_even_index(list(range(1,100))),-1)
test.assert_equals(find_even_index([0,0,0,0,0]),0,"Should pick the first index if more cases are valid")
test.assert_equals(find_even_index([-1,-2,-3,-4,-3,-2,-1]),3)
test.assert_equals(find_even_index(list(range(-100,-1))),-1)
"""
from .equal_sides_of_an_array import find_even_index
def test_find_even_index():
    """find_even_index returns the first index splitting the list into
    equal-sum halves, or -1 when no such index exists."""
    # (input list, expected index) pairs from the kata examples.
    cases = [
        ([1, 2, 3, 4, 3, 2, 1], 3),
        ([1, 100, 50, -51, 1, 1], 1),
        ([1, 2, 3, 4, 5, 6], -1),
        ([20, 10, 30, 10, 10, 15, 35], 3),
        ([20, 10, -80, 10, 10, 15, 35], 0),
        ([10, -80, 10, 10, 15, 35, 20], 6),
        (list(range(1, 100)), -1),
        ([0, 0, 0, 0, 0], 0),  # first valid index wins
        ([-1, -2, -3, -4, -3, -2, -1], 3),
        (list(range(-100, -1)), -1),
    ]
    for values, expected in cases:
        assert find_even_index(values) == expected
| 48.444444
| 104
| 0.664373
| 265
| 1,308
| 3.056604
| 0.154717
| 0.217284
| 0.353086
| 0.246914
| 0.871605
| 0.85679
| 0.814815
| 0.795062
| 0.62716
| 0.580247
| 0
| 0.164788
| 0.118502
| 1,308
| 26
| 105
| 50.307692
| 0.537728
| 0.490826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.833333
| 1
| 0.083333
| true
| 0
| 0.083333
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a2d7595a68a2830e2373a25fd14af57fb8772651
| 6,694
|
py
|
Python
|
tests/test_syntax.py
|
Edward-Knight/rich
|
b0a68d3341fb0eaa47be20facf54c4646375572b
|
[
"MIT"
] | null | null | null |
tests/test_syntax.py
|
Edward-Knight/rich
|
b0a68d3341fb0eaa47be20facf54c4646375572b
|
[
"MIT"
] | null | null | null |
tests/test_syntax.py
|
Edward-Knight/rich
|
b0a68d3341fb0eaa47be20facf54c4646375572b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import sys
import os, tempfile
import pytest
from .render import render
from rich.panel import Panel
from rich.style import Style
from rich.syntax import Syntax, ANSISyntaxTheme
# Sample program that the tests below highlight; line_range=(2, 10) in
# test_python_render selects the body of this function.
CODE = '''
def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]:
"""Iterate and generate a tuple with a flag for first and last value."""
iter_values = iter(values)
try:
previous_value = next(iter_values)
except StopIteration:
return
first = True
for value in iter_values:
yield first, False, previous_value
first = False
previous_value = value
yield first, True, previous_value
'''
def test_python_render():
    # Render a slice (lines 2-10) of CODE inside a fitted Panel and compare
    # the ANSI output byte-for-byte against a pre-recorded expected string.
    syntax = Panel.fit(
        Syntax(
            CODE,
            lexer_name="python",
            line_numbers=True,
            line_range=(2, 10),
            theme="foo",
            code_width=60,
            word_wrap=True,
        )
    )
    rendered_syntax = render(syntax)
    # Printed so a failing run shows the actual output for re-recording.
    print(repr(rendered_syntax))
    expected = '╭────────────────────────────────────────────────────────────────╮\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 2 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[3;38;2;186;33;33;48;2;248;248;248m"""Iterate and generate a tuple with a flag for first \x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[48;2;248;248;248m \x1b[0m\x1b[3;38;2;186;33;33;48;2;248;248;248mand last value."""\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 3 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248miter_values\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;102;102;102;48;2;248;248;248m=\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;128;0;48;2;248;248;248miter\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m(\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mvalues\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m)\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 4 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mtry\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m:\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 5 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mprevious_value\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;102;102;102;48;2;248;248;248m=\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;128;0;48;2;248;248;248mnext\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m(\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248miter_values\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m)\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 6 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m 
\x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mexcept\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;210;65;58;48;2;248;248;248mStopIteration\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m:\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 7 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mreturn\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 8 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mfirst\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;102;102;102;48;2;248;248;248m=\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mTrue\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m 9 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mfor\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mvalue\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;170;34;255;48;2;248;248;248min\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248miter_values\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m:\x1b[0m\x1b[48;2;248;248;248m \x1b[0m│\n│\x1b[1;38;2;24;24;24;48;2;248;248;248m \x1b[0m\x1b[38;2;173;173;173;48;2;248;248;248m10 \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248myield\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mfirst\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m,\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[1;38;2;0;128;0;48;2;248;248;248mFalse\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m,\x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248m \x1b[0m\x1b[38;2;0;0;0;48;2;248;248;248mprevious_value\x1b[0m\x1b[48;2;248;248;248m 
\x1b[0m│\n╰────────────────────────────────────────────────────────────────╯\n'
    assert rendered_syntax == expected
def test_ansi_theme():
    """ANSISyntaxTheme resolves token styles and the background style."""
    red = Style(color="red")
    theme = ANSISyntaxTheme({("foo", "bar"): red})
    # A deeper token path resolves to the style registered for its prefix.
    assert theme.get_style_for_token(("foo", "bar", "baz")) == red
    # No background was registered, so an empty default Style is returned.
    assert theme.get_background_style() == Style()
@pytest.mark.skipif(sys.platform == "win32", reason="permissions error on Windows")
def test_from_file():
    """Syntax.from_path() picks the Python lexer from a .py suffix."""
    fh, path = tempfile.mkstemp("example.py")
    try:
        os.write(fh, b"import this\n")
        # Bug fix: the descriptor returned by mkstemp was never closed,
        # leaking one fd per test run.
        os.close(fh)
        syntax = Syntax.from_path(path)
        assert syntax.lexer_name == "Python"
        assert syntax.code == "import this\n"
    finally:
        os.remove(path)
@pytest.mark.skipif(sys.platform == "win32", reason="permissions error on Windows")
def test_from_file_unknown_lexer():
    """Syntax.from_path() falls back to "default" for an unknown suffix."""
    fh, path = tempfile.mkstemp("example.nosuchtype")
    try:
        os.write(fh, b"import this\n")
        # Bug fix: the descriptor returned by mkstemp was never closed,
        # leaking one fd per test run.
        os.close(fh)
        syntax = Syntax.from_path(path)
        assert syntax.lexer_name == "default"
        assert syntax.code == "import this\n"
    finally:
        os.remove(path)
if __name__ == "__main__":
    # Ad-hoc visual check: render the same panel test_python_render uses and
    # dump both the styled output and its repr() to the console.
    syntax = Panel.fit(
        Syntax(
            CODE,
            lexer_name="python",
            line_numbers=True,
            line_range=(2, 10),
            theme="foo",
            code_width=60,
            word_wrap=True,
        )
    )
    # Bug fix: previously called render(markdown), but `markdown` is not
    # defined anywhere in this module — that line raised NameError.
    rendered = render(syntax)
    print(rendered)
    print(repr(rendered))
| 70.463158
| 4,286
| 0.609202
| 1,423
| 6,694
| 2.934645
| 0.1026
| 0.061063
| 0.122126
| 0.18319
| 0.750479
| 0.736111
| 0.734195
| 0.734195
| 0.734195
| 0.734195
| 0
| 0.338144
| 0.161488
| 6,694
| 94
| 4,287
| 71.212766
| 0.378764
| 0.001793
| 0
| 0.423077
| 0
| 0.012821
| 0.738174
| 0.572605
| 0
| 0
| 0
| 0
| 0.089744
| 1
| 0.051282
| false
| 0
| 0.141026
| 0
| 0.205128
| 0.038462
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0c31f5f5f21753b4eb5bd64a0178f841259a8e80
| 10,868
|
py
|
Python
|
tests/core/tests/authorization.py
|
SeanHayes/django-tastypie
|
a9e5b614b5e9d4b4d570b10a2154f8d2dd547a7a
|
[
"BSD-3-Clause"
] | 1
|
2015-11-08T11:42:07.000Z
|
2015-11-08T11:42:07.000Z
|
tests/core/tests/authorization.py
|
SeanHayes/django-tastypie
|
a9e5b614b5e9d4b4d570b10a2154f8d2dd547a7a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/core/tests/authorization.py
|
SeanHayes/django-tastypie
|
a9e5b614b5e9d4b4d570b10a2154f8d2dd547a7a
|
[
"BSD-3-Clause"
] | 1
|
2019-09-29T04:13:39.000Z
|
2019-09-29T04:13:39.000Z
|
from django.test import TestCase
from django.http import HttpRequest
from django.contrib.auth.models import User, Permission
from core.models import Note
from tastypie.authorization import Authorization, ReadOnlyAuthorization, DjangoAuthorization
from tastypie.exceptions import Unauthorized
from tastypie import fields
from tastypie.resources import Resource, ModelResource
class NoRulesNoteResource(ModelResource):
    # Active notes exposed with the permissive base Authorization class.
    class Meta:
        resource_name = 'notes'
        queryset = Note.objects.filter(is_active=True)
        authorization = Authorization()
class ReadOnlyNoteResource(ModelResource):
    # Active notes exposed read-only: write verbs are rejected by
    # ReadOnlyAuthorization (see AuthorizationTestCase.test_read_only).
    class Meta:
        resource_name = 'notes'
        queryset = Note.objects.filter(is_active=True)
        authorization = ReadOnlyAuthorization()
class DjangoNoteResource(ModelResource):
    # Active notes guarded by DjangoAuthorization, which consults the
    # requesting user's django.contrib.auth model permissions.
    class Meta:
        resource_name = 'notes'
        queryset = Note.objects.filter(is_active=True)
        authorization = DjangoAuthorization()
class NotAModel(object):
    # Plain object (not a Django model) backing NotAModelResource below.
    name = 'Foo'
class NotAModelResource(Resource):
    # Non-ORM Resource paired with DjangoAuthorization, for exercising
    # authorization against objects that are not Django models.
    name = fields.CharField(attribute='name')

    class Meta:
        resource_name = 'notamodel'
        object_class = NotAModel
        authorization = DjangoAuthorization()
class AuthorizationTestCase(TestCase):
    """Exercise the stock Authorization and ReadOnlyAuthorization classes
    against the 4 notes loaded from the fixture."""
    fixtures = ['note_testdata']

    def test_no_rules(self):
        # The permissive base Authorization lets every implemented call through.
        request = HttpRequest()
        resource = NoRulesNoteResource()
        auth = resource._meta.authorization
        bundle = resource.build_bundle(request=request)

        bundle.request.method = 'GET'
        # All 4 fixture notes are readable, both list- and detail-level.
        self.assertEqual(len(auth.read_list(resource.get_object_list(bundle.request), bundle)), 4)
        self.assertTrue(auth.read_detail(resource.get_object_list(bundle.request)[0], bundle))

        bundle.request.method = 'POST'
        # create_list is deliberately unimplemented on the base class.
        self.assertRaises(NotImplementedError, auth.create_list, resource.get_object_list(bundle.request), bundle)
        self.assertTrue(auth.create_detail(resource.get_object_list(bundle.request)[0], bundle))

        bundle.request.method = 'PUT'
        self.assertEqual(len(auth.update_list(resource.get_object_list(bundle.request), bundle)), 4)
        self.assertTrue(auth.update_detail(resource.get_object_list(bundle.request)[0], bundle))

        bundle.request.method = 'DELETE'
        self.assertEqual(len(auth.delete_list(resource.get_object_list(bundle.request), bundle)), 4)
        self.assertTrue(auth.delete_detail(resource.get_object_list(bundle.request)[0], bundle))

    def test_read_only(self):
        # ReadOnlyAuthorization: GET passes; every write verb is rejected.
        request = HttpRequest()
        resource = ReadOnlyNoteResource()
        auth = resource._meta.authorization
        bundle = resource.build_bundle(request=request)

        bundle.request.method = 'GET'
        self.assertEqual(len(auth.read_list(resource.get_object_list(bundle.request), bundle)), 4)
        self.assertTrue(auth.read_detail(resource.get_object_list(bundle.request)[0], bundle))

        bundle.request.method = 'POST'
        # Denied writes: list calls yield an empty set, detail calls raise.
        self.assertEqual(len(auth.create_list(resource.get_object_list(bundle.request), bundle)), 0)
        self.assertRaises(Unauthorized, auth.create_detail, resource.get_object_list(bundle.request)[0], bundle)

        bundle.request.method = 'PUT'
        self.assertEqual(len(auth.update_list(resource.get_object_list(bundle.request), bundle)), 0)
        self.assertRaises(Unauthorized, auth.update_detail, resource.get_object_list(bundle.request)[0], bundle)

        bundle.request.method = 'DELETE'
        self.assertEqual(len(auth.delete_list(resource.get_object_list(bundle.request), bundle)), 0)
        self.assertRaises(Unauthorized, auth.delete_detail, resource.get_object_list(bundle.request)[0], bundle)
class DjangoAuthorizationTestCase(TestCase):
    """DjangoAuthorization maps django.contrib.auth model permissions onto
    tastypie CRUD access: GET is always allowed, while POST/PUT/DELETE each
    require the matching add/change/delete permission on Note.

    The five original tests repeated the same 12-assertion matrix verbatim;
    they now share the private helpers below (decomposition — behavior and
    test names are unchanged).
    """
    fixtures = ['note_testdata']

    def setUp(self):
        super(DjangoAuthorizationTestCase, self).setUp()
        self.add = Permission.objects.get_by_natural_key('add_note', 'core', 'note')
        self.change = Permission.objects.get_by_natural_key('change_note', 'core', 'note')
        self.delete = Permission.objects.get_by_natural_key('delete_note', 'core', 'note')
        self.user = User.objects.all()[0]
        # Start every test from a clean slate: no model permissions at all.
        self.user.user_permissions.clear()

    def _build(self):
        """Return (resource, authorization, bundle) wired to self.user."""
        request = HttpRequest()
        request.user = self.user
        resource = DjangoNoteResource()
        bundle = resource.build_bundle(request=request)
        return resource, resource._meta.authorization, bundle

    def _assert_verb(self, resource, bundle, method, list_op, detail_op, allowed):
        """Check one HTTP verb: 4 visible objects plus a truthy detail check
        when allowed; an empty list plus Unauthorized on detail when denied."""
        bundle.request.method = method
        if allowed:
            self.assertEqual(len(list_op(resource.get_object_list(bundle.request), bundle)), 4)
            self.assertTrue(detail_op(resource.get_object_list(bundle.request)[0], bundle))
        else:
            self.assertEqual(len(list_op(resource.get_object_list(bundle.request), bundle)), 0)
            self.assertRaises(Unauthorized, detail_op, resource.get_object_list(bundle.request)[0], bundle)

    def _check_crud(self, can_create, can_update, can_delete):
        """Run the full GET/POST/PUT/DELETE matrix against DjangoNoteResource."""
        resource, auth, bundle = self._build()
        # Reading never requires a permission under DjangoAuthorization.
        self._assert_verb(resource, bundle, 'GET', auth.read_list, auth.read_detail, True)
        self._assert_verb(resource, bundle, 'POST', auth.create_list, auth.create_detail, can_create)
        self._assert_verb(resource, bundle, 'PUT', auth.update_list, auth.update_detail, can_update)
        self._assert_verb(resource, bundle, 'DELETE', auth.delete_list, auth.delete_detail, can_delete)

    def test_no_perms(self):
        # sanity check: user has no permissions, so the API is read-only
        self.assertFalse(self.user.get_all_permissions())
        self._check_crud(can_create=False, can_update=False, can_delete=False)

    def test_add_perm(self):
        self.user.user_permissions.add(self.add)
        self._check_crud(can_create=True, can_update=False, can_delete=False)

    def test_change_perm(self):
        self.user.user_permissions.add(self.change)
        self._check_crud(can_create=False, can_update=True, can_delete=False)

    def test_delete_perm(self):
        self.user.user_permissions.add(self.delete)
        self._check_crud(can_create=False, can_update=False, can_delete=True)

    def test_all(self):
        self.user.user_permissions.add(self.add)
        self.user.user_permissions.add(self.change)
        self.user.user_permissions.add(self.delete)
        self._check_crud(can_create=True, can_update=True, can_delete=True)
| 45.095436
| 114
| 0.715219
| 1,293
| 10,868
| 5.836814
| 0.070379
| 0.156751
| 0.126143
| 0.155824
| 0.846297
| 0.838081
| 0.816483
| 0.797668
| 0.786273
| 0.766795
| 0
| 0.006207
| 0.169856
| 10,868
| 240
| 115
| 45.283333
| 0.830304
| 0.012974
| 0
| 0.777143
| 0
| 0
| 0.0208
| 0
| 0
| 0
| 0
| 0
| 0.325714
| 1
| 0.045714
| false
| 0
| 0.045714
| 0
| 0.177143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c3c57809d2e495b6adaade17e0d37d8fa7a4047
| 8,098
|
py
|
Python
|
satchless/contrib/pricing/simpleqty/tests.py
|
bartels/satchless
|
4d333014333dc4fd5815f9e0bbea565959919a30
|
[
"BSD-4-Clause"
] | 1
|
2015-11-05T10:26:46.000Z
|
2015-11-05T10:26:46.000Z
|
satchless/contrib/pricing/simpleqty/tests.py
|
bartels/satchless
|
4d333014333dc4fd5815f9e0bbea565959919a30
|
[
"BSD-4-Clause"
] | null | null | null |
satchless/contrib/pricing/simpleqty/tests.py
|
bartels/satchless
|
4d333014333dc4fd5815f9e0bbea565959919a30
|
[
"BSD-4-Clause"
] | null | null | null |
from django.db import models
from django.test import TestCase
from ....cart.tests.app import MockCart
from ....pricing import Price
from ....pricing.handler import PricingQueue
from ....product.models import Product, Variant
from . import SimpleQtyPricingHandler
from .models import ProductPriceMixin, VariantPriceOffsetMixin, PriceQtyOverride
class DeadParrot(ProductPriceMixin, Product):
    """Test product model.

    NOTE(review): ProductPriceMixin presumably contributes the ``price`` and
    ``qty_mode`` fields exercised by the tests below -- confirm in .models.
    """
    species = models.CharField(max_length=20)
class DeadParrotVariant(VariantPriceOffsetMixin, Variant):
    """Test variant model; VariantPriceOffsetMixin adds the per-variant
    ``price_offset`` used by the offset tests below."""
    product = models.ForeignKey(DeadParrot,
                                related_name='variants')
    # NOTE(review): BooleanField without an explicit default -- fine on the
    # Django version this project targets, verify before upgrading.
    looks_alive = models.BooleanField()
class TestPriceQtyOverride(PriceQtyOverride):
    """Concrete quantity-based price override attached to a DeadParrot;
    reachable via the ``qty_overrides`` reverse relation used in the tests."""
    product = models.ForeignKey(DeadParrot,
                                related_name='qty_overrides')
class HandlerTestCase(TestCase):
    """Tests for SimpleQtyPricingHandler run through a PricingQueue.

    Covers flat product prices, per-variant price offsets, and
    quantity-based price overrides in both quantity modes:
    'variant' (threshold checked against the single variant's quantity)
    and 'product' (quantities of all the product's variants in the cart
    are summed -- see the arithmetic comments in the product-mode tests).
    """

    def setUp(self):
        # Queue containing only the handler under test, so no other
        # pricing handler can influence the results.
        self.pricing_queue = PricingQueue(SimpleQtyPricingHandler)

    def test_product_price(self):
        """A product with a flat price yields a degenerate price range."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          species="Hyacinth Macaw")
        price_range = self.pricing_queue.get_product_price_range(macaw)
        self.assertEqual(price_range.min_price, Price(unit_price, unit_price))
        self.assertEqual(price_range.max_price, Price(unit_price, unit_price))

    def test_variant_price_qty_override_in_variant_mode(self):
        """Overrides kick in at their min_qty thresholds (variant mode)."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          species="Hyacinth Macaw")
        macaw_a = macaw.variants.create(looks_alive=True)
        # two price override points
        qt_override_1 = macaw.qty_overrides.create(min_qty=3, price=8)
        qt_override_2 = macaw.qty_overrides.create(min_qty=6, price=7)
        # below the first threshold the base price applies
        price = self.pricing_queue.get_variant_price(macaw_a, quantity=1)
        self.assertEqual(price, Price(unit_price, unit_price))
        # first override should be applied
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     quantity=qt_override_1.min_qty)
        self.assertEqual(price, Price(qt_override_1.price,
                                      qt_override_1.price))
        # second override should be applied
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     quantity=qt_override_2.min_qty)
        self.assertEqual(price, Price(qt_override_2.price,
                                      qt_override_2.price))

    def test_variant_price_qty_override_in_product_mode(self):
        """In 'product' qty mode, quantities of all of the product's
        variants in the cart count towards the override thresholds."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          qty_mode='product',
                                          species="Hyacinth Macaw")
        macaw_a = macaw.variants.create(looks_alive=True)
        macaw_d = macaw.variants.create(looks_alive=False)
        cart = MockCart()
        cart_item_a = cart.add_item(macaw_a, 1)
        cart.add_item(macaw_d, 1)
        qt_override_1 = macaw.qty_overrides.create(min_qty=3, price=8)
        qt_override_2 = macaw.qty_overrides.create(min_qty=6, price=7)
        price = self.pricing_queue.get_variant_price(macaw_a, quantity=1,
                                                     cart=cart,
                                                     cartitem=cart_item_a)
        self.assertEqual(price, Price(unit_price, unit_price))
        # because 1 macaw_d variant is in cart:
        # qt_override_1.min_qty - 1 + 1 = qt_override_1.min_qty
        cart_item_a = cart.replace_item(macaw_a, qt_override_1.min_qty-1)
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     cart=cart,
                                                     cartitem=cart_item_a)
        self.assertEqual(price, Price(qt_override_1.price,
                                      qt_override_1.price))
        # because 1 macaw_d variant is in cart:
        # qt_override_2.min_qty - 1 + 1 = qt_override_2.min_qty
        cart_item_a = cart.replace_item(macaw_a, qt_override_2.min_qty-1)
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     cart=cart,
                                                     cartitem=cart_item_a)
        self.assertEqual(price, Price(qt_override_2.price,
                                      qt_override_2.price))

    def test_variant_price_offset(self):
        """A variant's price_offset is added to the product's base price."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          species="Hyacinth Macaw")
        macaw_a = macaw.variants.create(looks_alive=True, price_offset=2)
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     quantity=1)
        self.assertEqual(price, Price(unit_price + macaw_a.price_offset,
                                      unit_price + macaw_a.price_offset))

    def test_variant_price_offset_with_qty_overrides_in_variant_mode(self):
        """price_offset is applied on top of override prices (variant mode)."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          species="Hyacinth Macaw")
        macaw_a = macaw.variants.create(looks_alive=True, price_offset=2)
        # two price override points
        qt_override_1 = macaw.qty_overrides.create(min_qty=3, price=8)
        qt_override_2 = macaw.qty_overrides.create(min_qty=6, price=7)
        price = self.pricing_queue.get_variant_price(macaw_a, quantity=1)
        self.assertEqual(price, Price(unit_price + macaw_a.price_offset,
                                      unit_price + macaw_a.price_offset))
        # first override should be applied
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     quantity=qt_override_1.min_qty)
        self.assertEqual(price, Price(qt_override_1.price + macaw_a.price_offset,
                                      qt_override_1.price + macaw_a.price_offset))
        # second override should be applied
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     quantity=qt_override_2.min_qty)
        self.assertEqual(price, Price(qt_override_2.price + macaw_a.price_offset,
                                      qt_override_2.price + macaw_a.price_offset))

    def test_variant_price_offset_with_qty_override_in_product_mode(self):
        """price_offset applies on top of overrides in 'product' qty mode;
        a variant without an offset still gets the plain override price."""
        unit_price = 10
        macaw = DeadParrot.objects.create(slug='macaw', price=unit_price,
                                          qty_mode='product',
                                          species="Hyacinth Macaw")
        macaw_a = macaw.variants.create(looks_alive=True, price_offset=2)
        macaw_d = macaw.variants.create(looks_alive=False)
        cart = MockCart()
        cart_item_a = cart.add_item(macaw_a, 1)
        cart.add_item(macaw_d, 1)
        qt_override_1 = macaw.qty_overrides.create(min_qty=3, price=8)
        # because 1 macaw_d variant is in cart:
        # qt_override_1.min_qty - 1 + 1 = qt_override_1.min_qty
        cart_item_a = cart.replace_item(macaw_a, qt_override_1.min_qty-1)
        price = self.pricing_queue.get_variant_price(macaw_a,
                                                     cart=cart,
                                                     cartitem=cart_item_a)
        self.assertEqual(price, Price(qt_override_1.price + macaw_a.price_offset,
                                      qt_override_1.price + macaw_a.price_offset))
        # variant without price offset
        price = self.pricing_queue.get_variant_price(macaw_d,
                                                     cart=cart,
                                                     cartitem=cart_item_a)
        self.assertEqual(price, Price(qt_override_1.price,
                                      qt_override_1.price))
| 47.081395
| 84
| 0.591998
| 929
| 8,098
| 4.838536
| 0.092573
| 0.080089
| 0.053838
| 0.05495
| 0.833815
| 0.832258
| 0.796885
| 0.796885
| 0.768854
| 0.768854
| 0
| 0.016099
| 0.332675
| 8,098
| 171
| 85
| 47.356725
| 0.815692
| 0.066436
| 0
| 0.727273
| 0
| 0
| 0.019746
| 0
| 0
| 0
| 0
| 0
| 0.115702
| 1
| 0.057851
| false
| 0
| 0.066116
| 0
| 0.190083
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c5faa1f76539acb7021410ad9fcbed0fc8276b1
| 21,022
|
py
|
Python
|
modeflip/data/dev/sophia.py
|
lmcgnlzt/modeflip
|
415e9441561fe0ba40544b8313d912dd65c93981
|
[
"Apache-2.0"
] | null | null | null |
modeflip/data/dev/sophia.py
|
lmcgnlzt/modeflip
|
415e9441561fe0ba40544b8313d912dd65c93981
|
[
"Apache-2.0"
] | null | null | null |
modeflip/data/dev/sophia.py
|
lmcgnlzt/modeflip
|
415e9441561fe0ba40544b8313d912dd65c93981
|
[
"Apache-2.0"
] | null | null | null |
#coding=utf-8
import os.path
import random
from datetime import datetime
from modeflip.utils.config import get_configuration
from modeflip.utils.mongo import MongoManager
from modeflip.models.designer import *
from modeflip.models.collection import *
from modeflip.models.garment import *
# Bootstrap the Mongo-backed config stores used to seed the data below.
local_config = get_configuration()
# force_load=True presumably bypasses any cached connection -- confirm
# against modeflip.utils.mongo.
get_database = MongoManager(local_config, force_load=True)
config_db = get_database('mf_config')
dc = DesignerConfig(config_db)    # designer documents
cc = CollectionConfig(config_db)  # collection documents
gc = GarmentConfig(config_db)     # garment documents
############################################################################################################
# Designer id for Sophia Tezel within the config database.
DID = 3
# Short zh-CN marketing blurb shown with the designer profile.
intro = "来自纽约,Burberry全球时尚顾问,Juicy Couture,Rebecca Minkoff全球设计总监,好莱坞女星及歌手最爱的设计师之一,包括Taylor Swift,暮光之城女主角Christine Stewart、纽约第一社交名媛Olivia Palermo(gossip girl现实版)、Sex and the City女主角Sarah Jessica Parker、维密天使Miranda Kerr等。"
# Long zh-CN biography.  Lines starting with '|' inside the literal are part
# of the string -- presumably paragraph markers consumed by the client
# renderer; confirm before editing.
bio = """Sophia Tezel,来自纽约,Burberry全球时尚顾问,Juicy Couture,Rebecca Minkoff全球设计总监。
|Sophia Tezel从小在爸妈的服装工厂长大,耳濡目染下拥有了对时尚敏锐的嗅觉,和对工艺精巧的把握,并将这些渗透她的每一件设计作品之中。她认为时尚应该是美与实穿的结合,是有着让人惊艳的细节而让人总想再多看一眼。
|Sophia的身影总是出现在时尚的一线前沿,为包括Rebecca Minkoff、Nicole Miller、Juicy Couture等纽约年轻人钟爱的品牌担任设计总监,将她极高标准的国际品牌设计惊艳为品牌注入新鲜血液带来全新活力。为了达到美与实穿的完美结合,她总是倾注其才华与多年在设计开发生产的经验。她的身边永远有面料、纸板、无数的细节包围,而时至今日的她依然会为色彩、质感、工艺—所有组成一件完美服装的元素而激动和澎湃,对她而言,一件服装的诞生是一个有魔力的美好的过程。
|Sophia,在二十几岁时曾在悉尼创建自己的品牌,后来带着一颗要在世界时尚的中心大展拳脚的野心来到了美国纽约。她自己的品牌IS获得了无数的赞美和掌声,当时销往美国顶级奢侈品百货商场包括Barneys、Bergdorf Goodman、Saks等等。年轻的她很早便登上各大时尚杂志,包括Vogue、Elle、WWWD等。
|Sophia的设计总是受到好莱坞明星和歌手的青睐,包括Taylor Swift,暮光之城女主角Christine Stewart、纽约第一社交名媛Olivia Palermo(gossip girl现实版)、Sex and the City女主角Sarah Jessica Parker、维密天使Miranda Kerr等等。
|如今Sophia想用她对牛仔的热爱为时尚圈带来一个全新的牛仔品牌,并与2016年在纽约华丽亮相。"""
profile_images = ProfileImages(
    icon_url='images/resources/sophia/icon/icon.jpg',
    image_url='images/resources/sophia/icon/image.jpg',
    background_url='images/resources/sophia/icon/background.jpg',
)

# Curated thumbnail ids shown first on the experience page, in this order.
# Only ids whose thumbnail file actually exists on disk are kept.
# NOTE(review): hard-coded developer path -- this filter silently drops
# everything on any other machine; consider deriving it from settings.
thumbnails_ids = [24, 45, 6, 44, 14, 46, 49, 38]
thumbnails = ['images/resources/sophia/experience/thumbnails/{}s.jpg'.format(i)
              for i in thumbnails_ids
              if os.path.isfile('/Users/mli/modeapp/modeapp/static/images/resources/sophia/experience/thumbnails/{}s.jpg'.format(i))]

# Full set of picture ids (as strings, matching the file names on disk).
ids = ['0', '1', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '14', '15', '18', '19', '20', '21', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '40', '41', '42', '43', '44', '45', '46']

# Gallery: curated thumbnails first, then every remaining picture.
pics = [Picture(thumbnail='images/resources/sophia/experience/pics/{}s.jpg'.format(i),
                image='images/resources/sophia/experience/pics/{}.jpg'.format(i))
        for i in thumbnails_ids]
for i in ids:
    # BUG FIX: `ids` holds strings while `thumbnails_ids` holds ints, so the
    # original `i not in thumbnails_ids` test never matched and the curated
    # pictures were appended a second time.  Compare as ints to really skip
    # them.  (Also: a plain loop replaces the list comprehension that was
    # being used only for its .append side effect.)
    if int(i) not in thumbnails_ids:
        pics.append(Picture(
            thumbnail='images/resources/sophia/experience/pics/{}s.jpg'.format(i),
            image='images/resources/sophia/experience/pics/{}.jpg'.format(i)))
# Experience-page payload: partner-brand logos plus the gallery built above.
experience_content = ExperienceContent(
    brands=[
        'images/resources/sophia/experience/brand/burberry.jpg',
        'images/resources/sophia/experience/brand/juicy.jpg',
        'images/resources/sophia/experience/brand/r_minkoff.jpg',
        'images/resources/sophia/experience/brand/sophia.jpg',
    ],
    thumbnails=thumbnails,
    pics=pics,
    # videos=[
    #     Video(
    #         thumbnail='images/resources/sophia/experience/videos/thumbnail.png',
    #         poster='images/resources/sophia/experience/videos/thumbnail.jpg',
    #         url='images/resources/sophia/experience/videos/MaxMara.mp4',
    #     )
    # ]
)
# Exclusive-content page: a single picture for now.
exclusive_content = ExclusiveContent(
    pics=[
        'images/resources/sophia/exclusive/pics/1.jpg',
    ],
)
# pre_mkt_content = PreMarketContent(
# target_date='July 24, 2016 12:00:01',
# target_pic='images/resources/sophia/premarket/soon.jpg',
# )
# signatrue_products = [
# SignatrueProduct(
# picture='images/resources/sophia/product/1.jpg',
# title='设计师环保活页笔记本夹',
# subtitle='标志产品',
# desc='这是一段关于活页笔记本夹的简要介绍,字体可以调整',
# shop_link='http://notebook_shop_link',
# ),
# SignatrueProduct(
# picture='images/resources/sophia/product/2.jpg',
# title='A5 设计师独家设计卡',
# # subtitle='Signature Product',
# # desc='This is an awesome notebook, you will love it',
# shop_link='http://notebook_shop_link',
# ),
# ]
# private_musics = [
# PrivateMusic(
# music_icon='http://music_icon_1.com',
# title='Ugly Is Beautiful',
# author='David Usher',
# link='http://private_music_1.com'
# ),
# PrivateMusic(
# music_icon='http://music_icon_2.com',
# title='Beautiful Is Ugly',
# author='Usher David',
# link='http://private_music_2.com'
# ),
# ]
# Assemble the designer document from the pieces above and persist it.
d = Designer(
    did=DID,
    name='Sophia Tezel',
    profile_images=profile_images,
    is_active=True,
    on_market=True,
    origin='NEW YORK',
    intro=intro,
    bio=bio,
    experience_content=experience_content,
    exclusive_content=exclusive_content,
    # pre_mkt_content=pre_mkt_content,
    # signatrue_products=signatrue_products,
    # private_musics=private_musics,
)
dc.set(d)
print 'designer info saved'
############################################################################################################
CID_1 = 1
sophia_collections = Collection(
cid=CID_1,
did=DID,
title='SOPHIA TEZEL',
desc='作为好莱坞女星最爱的设计师之一,Sophia的设计总是受到好莱坞明星和歌手的青睐,包括Taylor Swift,暮光之城女主角Christine Stewart、纽约第一社交名媛Olivia Palermo(gossip girl现实版)、Sex and the City女主角Sarah Jessica Parker、维密天使Miranda Kerr等等。',
released=datetime(2016, 7, 25),
signatrue_pics=[
'images/resources/sophia/collections/sophiacollections/signature/pics/1.jpg',
'images/resources/sophia/collections/sophiacollections/signature/pics/2.jpg',
],
# signatrue_videos=[
# Video(
# thumbnail='images/resources/sophia/collections/201607/signature/videos/thumbnail.png',
# poster='images/resources/sophia/collections/201607/signature/videos/thumbnail.jpg',
# url='images/resources/sophia/collections/201607/signature/videos/MaxMara.mp4',
# )
# ],
# signatrue_musics=[
# 'http://sig_music_1.com',
# 'http://sig_music_2.com',
# ],
new_arrival=True,
)
collections = [
sophia_collections,
]
[cc.set(c) for c in collections]
print 'collection info saved'
############################################################################################################
fiona_pants = Garment(
gid=1,
cid=CID_1,
did=DID,
price=735,
shop_link='https://wap.koudaitong.com/v2/goods/277ly787ymf1v',
pic = Picture(title="Fiona Pants - 阔腿裤", image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/6.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/7.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/8.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/9.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants/details/10.jpg'),
]
)
diana_top = Garment(
gid=2,
cid=CID_1,
did=DID,
price=813,
shop_link='https://wap.koudaitong.com/v2/goods/3f0arh7vkfqeb',
pic = Picture(title="Diana Top - 蕾丝上衣", image='images/resources/sophia/collections/sophiacollections/garments/diana_top/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/6.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/7.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/8.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top/details/9.jpg'),
]
)
anne_skirt = Garment(
gid=3,
cid=CID_1,
did=DID,
price=850,
shop_link='https://wap.koudaitong.com/v2/goods/3enymaetx4k4j',
pic = Picture(title="Anne Skirt - 百褶裙", image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/anne_skirt/details/5.jpg'),
]
)
corrine_shirt_black = Garment(
gid=4,
cid=CID_1,
did=DID,
price=682,
shop_link='https://wap.koudaitong.com/v2/goods/2frleanqm1fpf',
pic = Picture(title="Corrine Shirt - 衬衫(黑色)", image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_black/details/6.jpg'),
]
)
corrine_shirt_white = Garment(
gid=5,
cid=CID_1,
did=DID,
price=682,
shop_link='https://wap.koudaitong.com/v2/goods/2xbg9oeh5ieoj',
pic = Picture(title="Corrine Shirt - 衬衫(白色)", image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/corrine_shirt_white/details/6.jpg'),
]
)
diana_top_blue = Garment(
gid=6,
cid=CID_1,
did=DID,
price=813,
shop_link='https://wap.koudaitong.com/v2/goods/1y2r95tvpc0o3',
pic = Picture(title="Diana Top - 蕾丝上衣(蓝色)", image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/diana_top_blue/details/6.jpg'),
]
)
fiona_dress = Garment(
gid=7,
cid=CID_1,
did=DID,
price=910,
shop_link='https://wap.koudaitong.com/v2/goods/364245n7v25lv',
pic = Picture(title="Fiona Dress - 连衣裙", image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/6.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_dress/details/7.jpg'),
]
)
kelly_dress = Garment(
gid=8,
cid=CID_1,
did=DID,
price=850,
shop_link='https://wap.koudaitong.com/v2/goods/2frkcz73cc6f7',
pic = Picture(title="Kelly Dress - 连衣裙", image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/kelly_dress/details/6.jpg'),
]
)
cappa_black = Garment(
gid=9,
cid=CID_1,
did=DID,
price=680,
shop_link='https://wap.koudaitong.com/v2/goods/3f0bz097p3uqr',
pic = Picture(title="Cappa Black - 披肩", image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_black/details/6.jpg'),
]
)
cappa_blue = Garment(
gid=10,
cid=CID_1,
did=DID,
price=680,
shop_link='https://wap.koudaitong.com/v2/goods/361l9h4hu41pv',
pic = Picture(title="Cappa Blue - 披肩", image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/4.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/5.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/cappa_blue/details/6.jpg'),
]
)
fiona_pants_blue = Garment(
gid=11,
cid=CID_1,
did=DID,
price=735,
shop_link='https://wap.koudaitong.com/v2/goods/1y7ogg4xp6mcz',
pic = Picture(title="Fiona Pants Blue - 阔腿裤", image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/fiona_pants_blue/details/4.jpg'),
]
)
hellen_dress = Garment(
gid=12,
cid=CID_1,
did=DID,
price=990,
shop_link='https://wap.koudaitong.com/v2/goods/3ewlwiecsmotv',
pic = Picture(title="Hellen Dress - 连衣裙", image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_dress/details/4.jpg'),
]
)
hellen_sundress = Garment(
gid=13,
cid=CID_1,
did=DID,
price=860,
shop_link='https://wap.koudaitong.com/v2/goods/3f1ikzrec8rz7',
pic = Picture(title="Hellen Sundress - 太阳裙", image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/cover.jpg'),
details=[
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/details/0.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/details/1.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/details/2.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/details/3.jpg'),
Picture(image='images/resources/sophia/collections/sophiacollections/garments/hellen_sundress/details/4.jpg'),
]
)
garments = [
fiona_pants,
diana_top,
anne_skirt,
corrine_shirt_black,
corrine_shirt_white,
diana_top_blue,
fiona_dress,
kelly_dress,
cappa_black,
cappa_blue,
fiona_pants_blue,
hellen_dress,
hellen_sundress,
]
[gc.set(g) for g in garments]
print 'garments info saved'
| 46.406181
| 243
| 0.753496
| 2,480
| 21,022
| 6.279435
| 0.12621
| 0.125217
| 0.175303
| 0.226032
| 0.789957
| 0.776023
| 0.731715
| 0.7138
| 0.696847
| 0.652861
| 0
| 0.020132
| 0.095043
| 21,022
| 453
| 244
| 46.406181
| 0.798413
| 0.077348
| 0
| 0.172956
| 0
| 0.018868
| 0.654536
| 0.580437
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025157
| null | null | 0.009434
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7424c15d4d75a5cf441234b6d6c6e2de8a307d5
| 34,493
|
py
|
Python
|
task1/checker_client.py
|
jzfrank/pai-task1-GPR
|
e96beffe618872adf9026168a4466a4934c46d5b
|
[
"MIT"
] | null | null | null |
task1/checker_client.py
|
jzfrank/pai-task1-GPR
|
e96beffe618872adf9026168a4466a4934c46d5b
|
[
"MIT"
] | null | null | null |
task1/checker_client.py
|
jzfrank/pai-task1-GPR
|
e96beffe618872adf9026168a4466a4934c46d5b
|
[
"MIT"
] | null | null | null |
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x08\x00\x55\x0d\x0d\x0a\x07\x2c\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x57\x21\x00\x00\x00\x00\x00\x3e\xce\x07\x37\xf9\xb9\xd9\x64\x07\x0a\x10\xb8\x19\x85\xd9\xbd\x53\x00\x00\x00\x00\x00\x00\x00\x00\x54\x50\x91\xcb\x10\x48\xe3\xc8\x28\xa7\x76\x2e\x5d\x07\x24\xc0\xc3\x89\xb8\xe6\xc4\x85\x7e\xf9\x48\x21\x8e\x3a\x65\x75\x52\xfb\x55\x79\xfc\xe6\x95\x45\xf3\xbd\x48\x94\x77\x38\xa1\x73\x0d\x47\xec\x59\xa6\x47\x15\x1b\xf0\x2d\xf0\x71\x1d\x31\x11\x37\x76\x8f\x70\x34\xec\x5b\x47\xcf\xff\x93\x51\x2d\xb2\x90\x97\xd2\x91\xfc\x26\x2e\xf4\x38\x56\x11\x90\x1f\xec\x0a\x29\x94\xba\xc5\x5b\xcc\xb6\xa8\x06\x59\xed\x36\xd1\x11\x45\x5c\xd2\xcd\xc9\xbe\xa3\x30\x07\xc4\x29\x64\xa5\xe8\x7b\x27\x28\x24\xff\x31\xb5\xa5\x64\xec\x29\x3b\x85\x7d\xcc\xdf\xbe\x2a\xac\xbe\x3e\x1f\x6f\x7e\x16\x68\x09\xfb\x7c\xe7\x69\x08\x2a\xae\xd2\xa3\x72\x5e\xee\xd6\x83\xf2\x51\x4d\x2d\xfb\xa9\x52\x11\xd7\x2e\x31\x3e\x0b\x87\x5b\xd4\xcf\x1b\xc5\xfa\x92\xc0\x95\x17\xf0\x5d\x41\x2a\xc4\x8a\xd8\xee\x38\x55\x18\xf2\xc2\x17\x67\xf2\x37\x95\xe6\x76\xd8\xf6\x91\xc6\xa8\x3d\xa7\x19\xa1\x78\xb4\x43\xa3\xa3\x2f\xa1\xee\xf2\x61\x47\x44\xec\x98\x2c\x1d\xa6\x2c\x44\x31\x5d\xb5\x83\xa2\x87\x03\x30\x70\x34\xbc\x87\x7e\x27\xfb\x02\xb0\x2a\xfe\xb9\x6a\x6c\xdc\xb4\x13\x65\x9f\x7b\x1e\x50\x9b\xa2\x03\xa3\xdb\x55\xa4\x10\xcb\x3c\xc0\xd9\xb4\xd3\xb0\x7a\xb0\xdd\x46\x55\x3a\x37\xa0\x1f\x3b\xb3\x5c\xa5\x21\x3c\x4a\x71\x86\x21\xe5\x9d\x2b\x9a\x68\xe9\x7f\xd2\x0a\xa0\xef\xcf\x57\x93\x52\xa3\x73\x16\xb8\x03\xdf\x48\xd4\x4d\xce\xd2\x2c\x05\xfd\xbe\x23\x28\xc9\x14\x1b\x2d\xb9\x12\x3b\xaf\xa1\x38\x97\x8e\xab\xf6\x53\xf3\x60\xe8\xb9\x7c\xe3\x6f\x63\xb2\xd1\xdf\xfc\x4a\x2c\x1e\x4d\x70\xcf\x9b\x75\x3d\x8e\xb6\x0a\x04\xc7\x73\xdf\x06\x38\x26\x79\xce\x83\xac\x7c\x63\xc0\xb5\xc3\xeb\xd6\x9f\x0b\xf7\xa0\x61\x8e\xe6\xfc\xd8\x0a\x1f\xb6\x85\x3e\xd9\x8a\x0d\x8a\xa9\x50\x1e\x15\x1d\x0a\x61\x2b\x3d\xc0\x81\xb2\x2b\xfe\x5d\xe2\x74\x8f\x6b\x90\xf2\x29\x74\x94\x75\x
2a\xb3\xe6\x7f\x02\x01\x5a\x6d\x8c\xf2\x20\x7e\x08\x67\x82\xb6\xc3\x1c\xdc\x4c\xce\x0e\xe0\x82\x75\xb1\xc2\x4b\x30\xec\x55\xc8\x55\x66\x53\x93\x6d\x2d\x3a\xac\x1d\x4c\x7a\xda\xe4\xd4\x3c\x87\xb9\x8e\x2c\x94\x70\x9d\x72\x2d\x82\x55\xe0\x4e\x20\x0b\x80\x4f\x3c\xe5\x15\xfc\xfb\xa7\x76\x8f\xf0\xf1\x66\x18\x2d\xa7\xab\x23\xe9\x34\x9b\x3c\xb5\x31\x3c\xdc\x03\x9a\x77\x0f\x2b\xae\xd4\x0d\xd4\xcf\x70\x0b\x11\xe9\x5b\x7f\x23\xec\x30\xfb\xc7\x54\x40\x90\x58\xfd\xba\x7f\x53\x12\x8a\x0f\x3c\x4f\x3b\xa1\x93\x5d\x78\x10\x3a\x3d\x89\xe6\x73\x03\xda\x2b\xfa\xf0\xb5\x2b\x85\x4b\xd8\x1a\xd7\x05\x2c\x95\xaf\x95\x68\x85\x94\xa3\xbc\x12\xce\xe2\x5e\xd2\x10\x42\x4e\xe5\x23\x4c\x6f\x6c\xce\x8e\x1b\xfa\x0e\xd0\xbf\xf7\xdd\xf9\x72\xce\x0f\x3b\x02\x51\xcf\x81\xd8\x04\xc8\x98\xe7\x48\xe5\x9a\xbb\xdf\x14\x94\xc2\xce\xcb\x6a\x77\xaf\x87\x27\xae\x0f\x91\x84\xd2\x17\x35\x45\xa8\x71\xc0\x11\x0a\x19\x0c\x45\x98\x00\x0e\x1e\xe0\xa3\xbc\x32\x1d\x24\x89\x68\x91\x15\xd4\xea\x00\x8d\x63\x94\x13\x48\xe3\x2a\x1a\xab\xcd\x9c\x84\xc3\x71\xa2\x7a\xee\x0e\xb2\x5b\xa6\xf6\x66\x5c\x03\xe8\x65\xcf\x1f\xe3\x57\x74\x43\x48\x2b\x8d\x60\x7d\x26\xd8\xaa\xe1\x09\xb4\x4d\xc8\xf1\xb5\xba\xad\x25\xcf\x80\xf3\x61\x5f\xe5\x85\x4a\xc2\x34\x2e\x3d\x43\x3b\x38\x85\xfe\xfd\xf5\xb5\x04\x19\x1a\x49\x53\x96\xff\xff\x92\x80\xce\x07\x9b\x7a\x23\x3f\x2a\x8b\xd4\x52\xef\x47\xec\xad\x2b\xe6\x11\x4c\xc9\x8a\x54\x67\x9d\x76\x1a\x50\xf3\x13\xc1\xb9\xc1\x68\x6f\x96\x71\xc7\x60\x2f\xc8\x0d\x6e\x83\xb4\x6b\xcf\xc8\xba\xfc\x49\xf9\xd4\xd9\x2c\x77\x78\xb8\x1c\x72\x0e\x80\xfc\xc1\x29\x0e\x6b\x58\x70\xfb\x64\x17\x33\x72\xe8\x4a\x21\xe4\x40\x51\xf1\xee\xc1\x42\x72\x2e\xc2\xe5\xfe\x5b\xaa\x97\x54\x93\xc8\xe3\x12\x09\x96\x5d\x4b\x73\xdf\xe1\x12\xaa\x98\xcd\xdd\x5e\x68\x35\xc9\xe7\xaf\xe5\xf9\x70\x5c\x85\x42\x7e\x61\xcc\xea\x6a\x5b\xfd\xd4\x14\x2a\xf0\xda\xe8\x9a\xdc\x8b\x40\x0d\xb8\xcd\x89\x99\xa8\x77\xa7\xc3\xb8\x80\xc2\x74\x0f\x07\xe0\x1a\xaf\xcb\xb9\x07\x34\x57\x0e\xe6\x41\xd1\x7f\x5b\x83\x40\x89\x52\xb5\xa6\x6c\x2e\xc3\xc1\x3c\x16\x1f\x35\x45\x
1b\x67\x51\x7b\xbb\xfd\x65\x5f\xe4\x71\x95\xed\xb0\xb1\xc0\x01\x9d\x51\xb5\x98\x69\x56\xfe\xa8\xbf\x00\xcd\x63\x51\x5e\xa5\x92\xee\x28\x9d\x64\xd5\xb3\x76\xd6\xe3\xe7\xde\xb0\x53\xa0\x82\x35\x69\x84\x9f\x3e\x9d\x72\x26\x04\x87\x38\xa3\x30\x5c\x5c\x65\x4c\x21\x99\xc8\x03\xc0\x23\xf8\x3e\x03\xe1\xb0\xaa\xd9\x9a\x76\x79\x53\x8d\xb4\x56\xd4\x7f\x90\xdd\x31\x6a\xa8\x53\xb4\x50\x3b\x36\x57\xe2\x10\xcb\xe5\x3e\xe0\xaf\xab\xd4\x0f\x2e\xe7\xff\xe4\x65\xbf\x9a\x12\x29\x46\x3b\xee\x73\x2b\x3a\xdc\x8d\xba\x56\x80\x1e\xa2\x70\x44\x1b\x74\xfa\x74\x37\x51\xee\x3a\xb8\x09\x62\x48\x19\xbe\x94\xfe\x52\xb4\x10\x5e\x88\x1c\x4b\x48\x0a\xd6\xdc\x28\xba\x60\x35\x1d\x62\xfe\x99\xa3\x3f\xc3\xf8\x08\x26\xd1\x22\x01\xa6\x9b\x3b\xe4\x82\xce\x2a\x14\x82\x5c\xf8\x69\x5c\x90\x32\x17\x84\xe3\xdb\x27\xa4\x1b\x9c\x55\xa7\x97\xdd\x37\x7d\xee\x2a\x38\xdc\x15\x07\xba\x2e\x73\x06\x02\x3b\x42\x21\x87\x6b\xe1\x69\xf6\xb6\x15\x57\xe9\x0c\xcf\x18\x40\x17\x48\x57\x0a\xf2\xc4\x8b\x71\x22\xbe\x3a\x91\x7a\xf4\x29\xa1\x42\x07\x63\xa0\xec\xea\xf7\x2a\x80\x8d\x83\x73\x4e\x3e\x83\x21\x40\xaa\x25\xcd\x89\x5e\x23\xe0\x1f\x7f\x1a\x83\xf6\x20\x29\xf5\x7c\xfd\x72\x07\x78\x8d\x9f\xc4\x22\x3c\x35\x69\x19\x4b\x0a\x49\xdf\xb7\x29\xb2\x9d\x22\x1e\xc4\x2e\x81\x69\x63\x89\xf9\x9c\x79\xaa\x47\x4b\x5e\xb4\xa9\x00\xdf\xcd\xda\xb6\x54\x59\x26\xc1\x4e\x41\x8f\x47\xdd\xf8\xd8\x4b\x71\x63\x7c\xda\xb7\xff\x2d\x35\x22\x2a\xdd\x23\x3c\x55\xce\x78\x82\x56\xed\x93\xd7\x37\x30\x18\x17\xc9\xde\xbd\xea\x88\x80\x37\x4a\x5f\xeb\xb2\x87\x34\x99\x50\x23\x0b\x85\xb3\xe0\xc3\x83\xd9\x5f\x48\x29\x99\x6b\x0b\x40\xc9\x1d\x86\x11\x57\xdd\x8f\x3a\xa7\x27\x87\xd2\xf3\x8a\xca\xd7\xe4\xaa\x03\xeb\x38\xb3\xa8\x4d\x2f\x4c\xeb\xcf\x1a\x7c\xa3\xe7\x1d\x96\xe6\x66\xa6\xba\xef\x67\x4b\x1d\xb9\xbb\xb1\xac\xe3\xbd\xe7\xba\xf9\xff\xf0\x5b\x8d\x5a\xb2\xf6\x22\xf3\x9b\x80\xb4\xc4\xd2\x80\x88\x31\x9a\x1e\x01\x36\xd4\x99\x40\xb2\x3e\xee\x42\xe3\x25\x6c\x91\x71\xf8\x66\x63\xd7\x17\xf8\x9f\xc0\xd1\x67\xb9\x14\x9c\xbc\xb7\x42\xd8\x83\x05\xed\x8c\xf7\x9d\x59\xfa\x72\xca\x
f0\xca\x20\xfc\xcb\xd9\xc4\xdd\xa7\x7b\x3b\xbf\x8f\xf4\xb2\x5f\xad\x2d\xc5\x49\x1c\x82\x02\xb9\xea\x7b\xfb\xf5\xa2\xe1\xbb\xe0\x46\xf4\x19\xbd\x6e\x82\x0f\x10\xd1\xcf\x3b\xc4\x41\xcd\xe9\x12\xc7\x4d\x71\x3f\x15\xca\x09\x36\x87\x9c\xf4\x97\x4e\x89\x23\x23\x04\xb0\xbc\x87\x8a\x02\x55\x0e\x99\xd0\xdb\x76\x02\xc0\x3f\x62\x2d\xe6\x94\xe8\xb5\xa0\x89\x68\x65\xc6\x13\xd9\x86\x7f\xd7\xf1\xed\x90\xab\x0b\x0c\x8b\xea\xc4\x50\xbd\x07\x02\x97\x0b\x15\x71\x44\xf2\x38\xaa\x51\x90\x86\x66\x7f\xb1\xec\x58\xf6\xeb\x96\x82\xd7\xa5\xeb\xbe\x93\xcb\x20\x16\xcc\x3d\x6f\x43\x07\xda\xe4\xba\x80\xc9\xe5\xc4\x62\x60\x4c\x64\x46\x3a\x7b\x21\xee\xbd\x51\x1c\x3c\x26\xcb\x43\xfa\xfa\x14\xd5\xaf\x27\x12\xe7\xcb\x27\x34\x0f\x68\x45\x68\x55\x39\x67\x6f\x4f\xa1\x39\xb7\x11\x25\x74\x3f\x5f\xae\xaf\xed\x84\x1c\x17\x80\xa9\x94\x5b\x2b\x0b\x33\xe7\x18\xb1\xb8\x33\x14\x87\x10\xe6\x60\xf8\xce\x31\xa2\xaa\x79\xb0\x75\x33\x68\x6c\xc6\xe8\x8f\xf2\x5d\x66\x0d\xfa\xda\x1c\x59\xda\xca\x2b\x67\x6f\xcd\x65\xa5\xfd\xec\x7f\xfa\x9c\x90\x4e\xda\xee\x8e\xd4\xec\x16\xd4\x27\xf6\x34\x5d\x51\x03\xde\x7b\x17\x43\x8c\xdd\x75\x22\x81\xf0\x80\xbf\x57\xa9\x65\x08\xd8\xcc\xcf\x07\xb6\xc8\x44\xc0\xa4\x37\x0d\x1a\x9b\x5f\x3f\x14\x9f\x27\x7d\x10\xa4\xc1\x5f\x1c\x88\x41\xd2\x7d\x87\xa9\x6e\x79\x0d\x22\xe7\x53\xf8\x62\x06\xa3\xa9\xdc\xba\x42\x83\x21\x5d\xa5\xa2\xeb\x63\x2b\x81\x29\x5f\x2f\xad\x35\x1e\xaa\x03\x3c\x78\x91\xeb\xdd\xc7\xba\xbf\x06\xc9\xc8\xd9\x06\xde\xba\x3e\x6b\x17\xe7\xc5\x6b\x5d\x79\x29\x47\xfc\xaf\xb4\xff\x58\x89\xa6\x64\xaf\xa0\x15\xf6\xff\x58\x8c\x73\x6c\x3f\x84\xac\x89\xf8\x02\x55\x8d\x6e\x82\xee\x5d\x06\xf6\x54\x10\x6c\x2b\xf8\x01\xe7\x79\xfa\x8b\x8b\xbc\x26\xd1\x8d\x42\x06\xce\xc5\x3d\x5e\xeb\x7f\xaf\xe8\xf5\x4a\xd5\xfd\x76\x01\x62\xd0\xcd\xac\xb5\x36\x19\xf2\x5e\xe4\xfd\x7a\xcb\xc7\x87\x83\x0b\x78\xe3\x3e\x7f\x71\xb4\xc0\x3f\xe0\x16\xc7\x60\xb3\x2a\x8f\x5b\xab\xc8\x3a\x9e\x32\xde\xfd\xf2\xb4\x75\x93\xae\x11\xfd\x78\xe8\xf5\x70\x3a\x2c\xd4\xa3\xa0\x6c\x24\x73\xc4\xa9\x64\xca\x94\x33\xce\xaa\xb9\x84\x8c\x4b\x
40\x2c\xdc\x85\x76\x6e\x71\xd8\xa8\x53\x7f\xa4\x13\xb3\xc0\x93\x3e\x6a\x26\x70\x1a\xa6\x29\xe5\xec\xfe\x9f\x08\xf5\x91\xde\x26\x7c\x08\xf3\x5a\x66\xd1\xb3\xfe\x3f\x1a\x49\x15\xeb\xbf\x32\xc2\x2f\xc6\x10\xe2\x80\xd8\xdc\x87\x21\xbf\x0b\xf4\xcd\xc6\x16\x0b\xac\xbd\x0f\x36\x51\x47\x38\xec\x7b\x49\x79\xc1\x16\x4e\xce\x98\x39\xab\x99\x65\xb3\xe1\x76\x9d\x9f\x32\xaf\x7c\xfd\x00\xa4\xbd\x4b\x8a\xd0\x61\xa1\x7f\xe4\x09\xa2\x48\x67\x8a\xbb\xe4\x1f\x6a\x56\x8f\x75\xde\x0d\xee\x63\xa2\x53\xbc\xc5\x38\x91\x7f\x4b\x44\x8b\x0b\xbb\x68\xd8\x0b\x6d\x66\x74\x74\xf5\xaf\x8f\x49\x65\x19\x0e\xdc\x35\x45\x51\x3f\x1b\x49\x1b\xbd\x3c\x71\x32\x9e\xac\x25\x81\x04\x3a\x99\x7f\x4b\xb2\xd8\xdf\x65\x81\xc7\x60\xf4\x92\x1e\x56\x07\xf6\xa4\x98\x43\x87\x46\x67\xa0\xa7\xf1\x82\x76\x0b\x1e\x43\xc7\x55\x73\x5e\x5b\x83\xe6\xdf\xb3\x12\x75\x48\xfe\xbe\x56\xc7\x83\x3d\x9d\xeb\x43\xd6\x22\x62\x80\x76\x75\xb2\x7a\x2e\xdb\x0e\x82\xf5\x81\xf8\x97\x08\xd3\x55\xbd\x79\xe0\x6b\x48\x86\x47\xaf\x54\x38\xd8\x74\xe3\x42\x83\x94\x43\x12\x56\x18\x8d\x2a\xdd\xb0\x02\xf9\x0f\x93\x6c\xf4\xe6\x47\xe5\x71\x7f\xef\x8f\x97\x71\x44\xcc\xec\x7d\x9a\x10\x31\x3f\x1b\x8e\x87\x35\x6f\x25\xd2\xd4\x14\x76\x83\xed\xae\x1a\x46\xa7\x52\xd2\x4b\x2f\xb7\x7e\x5c\x7d\x2f\x75\x08\x8e\x7f\x0c\x4b\x15\x48\xb4\x22\xe6\x59\x54\x35\xe4\x45\x2f\x6f\x17\xaa\xcc\x50\xb0\xf9\xe2\xba\x6d\x3d\xba\xc7\x1f\xc7\xae\x07\x6d\xe1\xe8\xb4\xee\x13\x2f\xcd\xd3\x22\xf9\x94\x8d\xf9\x66\x6c\xe1\xd2\x9d\x25\xe9\xdb\xcd\x3c\x61\x90\xaa\xd3\xe5\xc5\x6c\x6e\xc8\xc2\x15\xd2\x7b\xb1\xae\x48\x16\x6e\xbe\x59\x25\x8a\x99\x0b\x9c\x97\x3b\xfe\x3d\xda\xa9\xab\x0f\xfc\x31\x3d\xd0\x8f\xda\xb3\x33\xac\x4a\xbf\x17\x68\x94\x13\x40\xa1\x14\x36\x64\xde\x6e\xc9\xd8\xdc\x0f\x4c\x51\x91\xb5\x55\x2a\xa9\xf7\x38\x18\xbb\x52\x40\x01\x71\x57\x97\xb3\xb8\x1f\xdc\xb4\x02\x67\xa6\xe1\x8c\x48\x86\xa9\xa7\xaf\xf3\x54\x3d\xfa\x06\x9c\x70\xaf\x32\x00\xc9\xca\xd1\x95\x0f\x4b\x7b\x9f\xbc\xd1\x34\x07\xfe\xc8\x02\x52\x2c\x7d\xa5\x20\xb1\x74\x54\x64\x27\xb8\xf3\x7a\xb0\x88\xaa\x17\xe8\xec\x03\x89\x
8a\xc5\xe0\x46\xc1\x89\x7c\xdc\xe9\xba\x79\x7b\xf1\x4b\x6f\xaa\xa3\x98\x4b\xa3\x71\xff\x6f\x42\x26\xdc\x77\x12\x1b\x48\xd3\x6a\xfe\xbf\x9b\x5b\x7d\x57\x72\x67\x5f\x03\xb2\xd7\x73\x7e\xef\x18\x49\xab\x89\xa2\x52\x13\xbe\x3f\xe0\x92\x9f\x10\x24\x42\x41\x94\x1d\x4b\xa4\x43\x76\xfc\xce\x39\x20\x3a\x94\xde\x60\xf4\x27\x71\x36\x30\xd3\x8b\xbf\xc8\xea\xa3\x68\xa9\xbf\x6a\x5b\x7a\xf9\x0d\xd3\x45\xd4\x9d\xe4\x07\x02\x6a\x3c\x53\xaa\x63\x54\x77\x4a\xbd\x03\x5b\x48\x18\x56\xc1\xe3\x65\xe5\xbe\xdd\xd5\x60\xc7\xd2\x9f\x3e\x9b\x31\xe2\xe9\x40\xc0\xd4\x52\xfb\x44\xcc\x1a\xdd\x64\x46\x0d\x24\xbe\xfd\xd2\x74\xb1\x89\xec\x87\x56\x37\xbe\xa5\xc3\x57\x01\xb2\xb7\x86\x62\x9b\xf0\x7a\x2a\x36\x89\x71\x2c\x5b\x1f\x9e\x90\xe2\x23\x21\x38\xb4\x12\x25\x80\x29\xf5\x00\x67\x94\x34\x1d\x24\x6f\x7b\xe1\xcf\x06\x3c\x6b\x99\x5f\x4f\xb7\x40\x4f\x21\xa6\x50\xbd\x7f\x0c\x2e\xec\x9a\x11\xa2\x1d\x77\xf5\x4e\xc9\x23\xee\x1e\xc6\x04\x41\xae\x8a\x6f\x8f\xb0\x1f\x3e\xcc\x74\xf6\x01\xa5\x96\x84\x08\xce\xc4\x4e\x9d\xaf\xa0\xcf\x0e\xdd\xc2\x88\x1a\x28\xb3\x4e\xb0\x51\x2b\x85\x8d\xec\xc0\x60\x44\x10\xf8\x29\xed\xf0\xc9\xc1\x85\x1f\x0c\xaf\xa6\x52\x7f\x74\x3a\x6e\xc8\x66\xc5\x47\x0d\xb6\xe5\x8c\x2d\x12\xec\x95\x1d\xd6\xda\x50\xc9\xa0\x73\xb2\x7e\x1f\x18\x15\x2f\xe8\xa2\x5b\xf6\x88\x23\xc0\xf9\xbb\x04\xf9\xa1\xff\x78\x1c\x5b\xa9\x1c\xe0\xb4\xe2\x00\x9d\xf0\x21\xf2\x08\xfd\xa8\x30\xaa\x93\x81\xed\x47\xb6\x52\x60\xa9\x0d\xca\xef\xd7\x88\xbe\xb8\xbe\xd8\x52\x28\x6e\x60\x78\xd0\x83\x61\x6f\x07\xc8\x8b\x95\x67\x65\x75\x01\x08\x84\x21\xd2\xe2\x14\x8d\x01\x0c\x04\x88\x23\xb2\xca\x14\x96\x8b\xbb\x8c\xe4\x55\xc1\x14\x4d\x8f\xd3\x2d\x45\xa9\x9d\x8e\xde\x9a\xa1\xec\x8b\x0f\xc9\x68\xd9\x10\x7b\xeb\xdf\x9f\x94\xb0\x67\xc9\xea\xac\x66\xcd\xa1\xba\x83\xee\xd0\xff\x8f\x39\xf2\x84\xfa\xff\xf6\x96\x62\xe1\x1e\x0b\xf1\x77\xcf\x2e\xa1\x90\x25\x1e\x5e\xf0\xf1\xdb\xf8\x22\xa0\x1e\x25\x0a\xd8\x53\xcf\x63\x2e\x9a\x69\xcd\x77\x2e\x55\x37\x34\xec\x21\x62\xe3\xf7\xe0\xb0\xed\x66\xe5\x2d\xce\x24\x95\xdd\x30\x9f\x60\x6e\xf8\xd2\xb5\xe8\x72\x
c4\xeb\x32\x56\xda\x51\x21\x7a\x6e\xfc\x8e\x74\x20\x6d\x3d\x07\xe6\xb8\x3d\x23\x0f\x6b\x1e\xef\xb7\x56\x41\x01\x06\x11\xbe\xd1\x72\xc0\xcc\x9d\xe3\xab\x7a\x0c\x60\x02\x35\xe3\x48\xba\x11\x85\x0c\x6b\xc7\x36\x41\x18\x62\xef\xee\x68\x62\x5d\xe7\x6f\x85\x0b\x6c\x2e\x98\xcb\x17\x1d\xd9\xf1\x27\x57\x7b\xea\x98\xfb\x39\x8c\x87\xf8\xd1\xf5\x91\x5d\xfa\x43\xf0\x04\xc8\x8a\x53\xff\x63\x19\x29\x66\x31\xe6\x2c\x23\xa4\x58\x41\x2a\xf0\x57\x2c\x57\xa2\xff\x12\xeb\xd0\xfd\xec\x7e\xf5\xbe\xce\xc2\xea\x48\x40\xc3\x99\x02\xb7\x9f\xf9\xec\x82\x51\x31\x44\x58\x9b\xcb\xbd\xeb\x64\x74\x90\xc5\x33\x30\xbf\xee\x06\xf9\xdb\x55\x8b\x4c\x4c\x12\xca\x87\xfa\x73\xcf\x47\xd5\x44\xb4\x9b\x87\xfd\xfe\x9c\xd6\x09\xa2\x4d\x96\x4a\xd1\x8f\x9f\x58\xd0\x30\x38\xf7\x50\x24\xcb\x99\xaa\x6f\xb4\xac\xed\x16\x07\xc6\xce\x2c\xfc\x58\xcb\xf7\xb6\x40\x4a\x5b\xea\xeb\xe4\x02\x78\x91\xe4\xa7\x9e\x47\x1f\xae\xf4\x42\xb8\x4b\x52\x95\xfd\x8e\x44\xbd\x7d\x3e\x37\x81\x52\x72\xc2\xe7\x07\xf8\xb2\xaf\x95\x1f\x04\xe0\x79\x15\x92\x38\x49\xeb\xac\x95\xf0\xae\xf1\xa6\x5b\xaf\xa9\x6c\x97\x86\x65\xc1\x3d\xfe\x00\x11\xdc\xfd\xbc\x68\xfa\x19\x25\x3a\x95\x84\x09\xaa\xcd\x80\x36\x88\xbb\x90\x73\x82\xd8\xef\x91\xf5\x34\xc4\x41\x10\xbb\xad\xa1\x0f\x7f\xe5\x1c\x90\xa2\xdb\xf9\xf0\xae\x96\xb7\xa3\xda\xdc\x3b\xe2\x71\xb1\xe9\xdf\xd2\xf8\xc1\x8e\x12\x4f\x61\x3d\x4b\x6b\xd1\x60\xce\x15\x77\xb2\x69\xe7\x98\x5b\x2a\xc4\x3f\x41\x80\x82\x34\xaa\x60\xe1\xde\x2a\xe2\x1b\x6f\xf6\xd7\x27\x20\x2d\x20\xe9\x0d\x25\x1e\xd3\x11\x41\x81\x77\x6f\x5a\xbc\x1c\x80\x0c\xfa\x84\x6d\x93\xad\x72\x5e\x15\x76\xd7\xe9\x58\x2c\xb3\x17\xfa\x96\xfc\x84\x76\x70\xb7\xf5\xb6\x4c\xd4\x0b\xcc\x7a\xda\x42\xa7\x56\x4f\x2f\x61\xe4\x04\xae\xc6\xc7\x73\xd5\x6e\xf6\x80\xea\xc0\xf6\x41\x52\x20\x5f\xd3\xa6\x97\xa2\xaf\xe6\x1b\xd4\xcf\x0d\x42\x61\xe9\x15\xb9\xff\xa4\xab\x7c\x73\x52\x20\x2b\xbb\x48\x6f\xab\x8c\x97\x55\xb9\x9f\x31\x4d\xd1\x6e\x89\x2e\xb8\xa0\xae\x91\xe8\x4d\x3a\x82\x77\xa5\xfd\x9d\x9b\xbe\xde\xbe\xb8\xc9\x49\x9d\xec\x76\x31\xb4\x7c\xea\x99\x56\x29\x16\x18\x48\x
94\x07\xc0\x6e\x15\xff\xa9\x60\xfd\x68\xc9\xaf\x74\x89\x54\x17\x52\xde\xe3\x8a\x76\x96\x7d\xd1\x4f\x72\xad\xfd\x20\x1c\xdc\x34\x3e\xee\xda\x7d\x04\x68\x41\xf7\xeb\x9c\x79\x71\xdf\x85\x46\x83\xa8\x38\x12\xa3\xc3\x13\x5b\x59\x7d\x2a\xb6\x13\xa1\xb9\x27\x68\xd0\x87\xba\x92\x43\x93\x91\xdc\x8c\x80\x5a\x1e\x76\xe1\xa4\xc0\xf4\x97\xb6\x5b\x36\xc7\xfa\x1d\x35\xda\xa9\xc3\x89\x7b\x1c\x13\x7c\xd5\x3f\x80\x14\x93\xc9\x74\x26\x79\x03\xab\x89\x33\xa9\x5d\x22\x68\xf6\x03\xdb\x09\xfe\x14\x34\x30\x0b\xc7\x07\x7e\x24\xc9\x03\x77\x67\xf1\x9c\xfc\xe5\x9c\x8f\xd1\x60\x6d\x6a\xce\xcf\xf9\x25\xc8\xbb\x76\xf0\x3a\xb3\x82\x4d\x1f\x97\x38\x10\x03\x7d\x98\x70\x15\xf2\x8f\x9b\x23\x56\x73\x51\xf7\x39\xee\x68\x9f\x9e\x78\xd0\x0b\x5f\xa8\xee\xb2\xff\xf4\x67\xc0\x93\x15\x4c\x69\x65\x1a\x62\xe4\xea\xb6\x6e\xfa\x35\x3b\xed\x25\x07\x87\xc1\x30\xab\x8d\x70\xe3\xd8\xff\xd6\x74\x7d\xdf\x49\x28\x50\x14\xe4\x82\x8d\x97\x25\xca\xfc\x94\x9a\xc6\x16\xda\xc0\x1e\x9d\x46\xea\xe0\xd8\xb1\xe9\xd8\xcb\x87\x8c\x48\x53\x6e\xe2\x44\x1f\xee\x81\x7c\xbd\xc8\xb4\x7b\x55\xb9\x12\x2d\xa1\xee\x61\xd3\x3b\xc2\xd8\x2b\xc7\x0b\x09\x2c\x08\x9a\xeb\x6a\x12\x7d\x6e\x89\x2a\x7f\x04\x9d\x25\x24\xb7\xd7\x66\xda\xcf\xde\xda\x22\xfc\xe7\x43\x01\x66\x0f\x62\xb1\x30\x21\xe1\x25\x2e\x9b\xbb\x57\x3e\x67\x3e\x12\x1c\x28\xee\x2d\xcd\xa7\x6c\x3a\x7e\xa8\x85\x0a\x14\x59\x25\xc4\xfa\x40\xe3\x0e\x67\xcd\x0a\x63\xb5\xca\x10\x46\xec\x1d\xe1\xf3\x2b\x2c\x63\x1f\x07\x97\x32\x45\xca\x1c\xb8\xa2\x4e\x9f\xf6\x1e\x51\x88\x2c\xe0\x45\x16\xc6\xf5\x3a\x47\xc8\xcf\x58\xf8\x05\xce\x9a\xa3\xec\xb5\xab\x15\x9a\x51\xac\xda\x6c\x69\x37\xab\x65\x3f\xc0\x99\x34\xca\x27\xe5\x31\x5a\xca\x4e\x33\x15\x96\x2a\xf7\x99\x2d\x2f\x71\x0b\xe3\x75\x30\x30\x9a\x93\x2c\xfd\x4b\xb3\xf6\x29\x0c\x69\x1e\xc8\xec\x00\xae\x02\xba\x67\x96\xc5\xef\xfe\xa4\xc4\xb7\xf6\x13\x54\xae\x90\x87\x1c\x13\x46\xc7\x3c\x8e\x47\xd0\x16\x52\x5e\x1e\x5d\x3c\x82\x8d\xc5\x15\x26\x76\x7f\x1d\x07\x7e\x3a\xfa\x69\x7f\x9b\x1f\xf3\x12\x79\x9c\xa1\x0b\xc6\xa8\xeb\xe1\xb9\x8d\x3e\x34\x72\xe6\x41\x77\x22\x
c7\xdb\xe6\x96\xe5\xd9\xb9\xf0\x23\xf6\xc0\x07\x26\x7e\x93\x6d\x30\x93\xb3\x04\xf5\xfb\x3b\x49\x50\x14\xcf\xd4\x16\xc2\x23\xe7\x86\x78\xcd\x0e\x2c\x79\xe0\x62\xff\x2b\x13\x70\x05\x32\x0f\x56\x9d\x87\xfa\x4a\xb4\x2b\xec\x3c\x12\x8b\x99\x76\xde\xa2\x2b\xec\x98\x55\xf3\x8c\x0f\x7f\xdb\xa1\xc6\x94\x52\x71\x57\x45\x61\x05\xc1\x38\x06\xf9\x3d\x5f\x4a\x12\xaa\x33\x0b\x18\x10\xc3\x18\x84\x67\xcd\x1d\x30\xe8\x13\x87\x75\xdf\x45\x6e\x6b\x2d\x1d\xd2\x74\x8d\x32\x80\x13\x06\x3e\xc0\xdf\xaa\x7f\x07\x30\xa6\x9c\x2d\x98\x00\x75\xcc\x60\xfb\xa7\x26\xd9\xab\xdc\x84\xb4\x0b\xc3\x68\x81\xce\x8e\x91\xe9\xb2\x56\xea\xc8\x03\x5a\x77\x67\xce\x5f\xfc\xa2\x1b\x9b\x92\x18\x5e\x10\x50\xbc\xbf\x47\xb7\x70\x37\x95\x93\x3b\x61\xff\x88\x07\xe1\x91\xd1\x9a\x9a\x3f\xe5\xc3\x21\x20\xc4\x36\x65\x16\x3c\x3b\xb3\x5a\x01\xe6\x3e\xa2\x4c\xa9\xf9\xb6\x94\xb6\x00\x9a\x42\x8b\xac\x01\xef\xdf\xea\xc3\x57\x07\x70\x68\x0a\x38\x2f\xc3\xea\x72\x66\x2e\x56\xc0\x49\x92\xb3\x87\xa3\xe0\x84\xc7\xec\x73\xd8\x26\xd7\x4b\xbf\x0e\x5b\x23\xb2\x69\x61\x3b\x4b\x9e\x39\x22\x90\xfb\xca\x0d\x7e\x30\x1b\x30\x8e\x11\xe8\x80\x59\x17\x9d\x7c\xb4\xbf\x48\x79\x21\xdf\x44\x67\x27\xb0\x17\xa1\x87\xaf\x0c\xe4\xd5\x5c\x00\x7b\xf5\xe8\x6c\x37\xf9\x2f\x3b\xd1\x41\xc0\x80\x3f\x7c\xfa\x37\xda\x7c\x56\x40\x7f\xb9\x57\x0e\xc1\x84\xcc\xe3\x1e\x41\x75\x3f\x4d\x38\x5a\x12\x40\x01\xc9\x76\x7c\x2b\x8f\x75\xe5\x63\x15\xf9\x98\xd2\xe3\xbf\xa9\xad\x5e\x76\x29\xd9\xd8\x86\x04\xe2\x80\x8e\x6d\x96\x7a\x41\xd2\xc4\x0a\xe2\xb2\x1d\xe7\xe0\xd5\xf5\xb3\x28\xe1\x3c\xa7\xe7\xbd\x16\x9c\x58\xc1\x15\xda\x75\x8f\xbb\xa0\x37\x45\x9b\xfa\x8f\x30\x8f\x16\x4c\xb0\x54\x27\x93\x94\xf2\xfb\x98\xb1\x4d\xca\xde\xa3\x83\xab\x99\xc3\xfb\xbc\x79\x59\x80\xc9\xf0\xe4\xa7\xb1\x89\xe9\xad\xbc\x07\x12\x8e\xc6\x41\x0c\x4d\x67\x4d\xc7\xa5\x8c\x04\xed\xe8\x77\xc5\xf3\x42\x05\x2c\xa4\x74\x65\xbc\x18\x64\x35\xc5\xde\x2d\x52\x36\x6c\xbe\x67\x0b\xbf\x2a\xa2\x89\xc1\xd6\x38\xe8\xc4\x31\xe7\x64\x44\x15\xf1\xc6\xb1\x5b\xaf\xcc\x9a\xb2\xf7\x9e\x8d\xf9\x9f\x4b\xb1\xa6\x0f\xbd\x2d\x41\x05\x
36\x7e\x07\x42\xf9\xd7\x4a\x18\x7e\x80\xa9\xf3\xce\x24\x32\xc1\xe0\xcc\x94\x92\x70\x8b\xa3\x80\x9b\xf2\xda\x5c\x69\xf5\xc3\xe6\xcc\x59\xa0\xca\xef\xaf\xc4\xea\x87\xf3\x38\x55\x24\xe3\x1c\x7b\xd4\x52\xb3\x76\x9d\x40\xb9\xc3\x7b\x77\xa9\xfd\x28\x83\xc4\xba\x67\x03\xda\x1c\xa9\xcc\x94\x18\xd3\xd4\xaf\x07\x8d\xfb\x89\x0b\xbe\x1b\x33\x4f\x9e\xd7\xf9\x29\xc6\xcd\x89\xa8\x4f\xd9\x1b\x3f\x41\x71\x10\xb8\xad\xf8\x36\xc7\x16\x18\x69\xf1\x59\xdc\xf1\xea\x68\x9f\x4d\x78\xf7\xfc\xe2\x5c\x0a\xcc\x71\xeb\xbf\x23\x3a\x9f\x79\xde\xdc\xca\xa7\x83\xe4\x3d\xed\xff\x28\xc7\x34\x7d\xb5\xb9\x79\x70\x73\xa2\xa9\x5b\xc4\x95\x80\x4d\x52\x66\x47\x45\x51\xef\x54\xf3\xbd\xdb\x38\x92\x53\xe2\xd3\x19\xfd\x38\x7d\x2f\x1a\x13\xda\xda\xe4\xd4\xb5\x4c\x32\xba\xcc\x6f\x03\x89\xcb\x7e\x06\xc7\xfc\xd6\x52\xb4\x21\x87\x7d\x05\x6d\xc9\xdf\xd5\x5a\xd0\xf1\xe9\xc9\x97\x1f\x6b\x1f\xda\x7d\x6c\xb5\xd5\x93\x4e\xde\x83\xc8\x9c\x3c\x1e\x41\xb9\x7f\xeb\xdb\x24\xee\x55\x2f\x33\xbe\xfe\x01\xa1\x01\x73\x05\x69\x72\xb6\x01\xe2\x46\xc0\xd5\x42\xb7\xde\x0a\x61\x03\x26\x6b\x52\xb4\x37\xd9\x42\xb8\xab\xd9\x26\xd7\xaa\x85\x17\x42\xf5\x49\x3f\x4f\x9d\x79\x20\x65\x12\x15\x32\x68\x0a\xe9\x0b\xdf\x13\x86\x40\x04\xb1\x2c\x93\xc5\xf1\xbf\x9d\x6f\x99\xb4\x60\x37\xc6\x7f\xf1\x55\xfa\x79\x6c\x03\x99\x81\x86\xcf\x7d\x0f\x46\xc4\x52\x8f\x89\x87\x40\x70\x0a\xe4\x9b\x62\x5c\xe4\x8c\x80\xdd\x8b\x11\x27\xf8\x5c\xec\x3e\x4e\x09\x3e\x19\x1b\x5b\xa7\x68\xc9\xc2\x72\x7f\xc2\x3c\xea\xa9\xd4\x73\x35\x38\xc7\x84\x38\xae\x30\xe0\xd9\xc9\xcb\x2a\xb3\xfe\xa0\xf2\xff\xf1\x43\x82\xde\xc8\x55\x11\x08\xe2\xd9\xa8\xf0\x62\x6b\x47\x5f\x39\xa3\xb4\x10\x78\xbb\x08\x30\xcf\x0b\x54\x64\x21\x29\x27\x0d\xb1\x4d\x6e\x08\x69\xab\xb8\x8a\x83\xf5\xcf\x15\xe3\x95\x45\x32\xca\x5d\x38\x9c\x84\x10\x5f\xd7\x01\xb0\xde\x35\xcb\xed\xa8\x8b\xb7\x54\x9d\xee\xef\x69\x0d\x4a\x3e\x93\x12\xc2\xb4\xfb\x53\xad\x6b\x1d\x77\x76\x5a\x69\xc0\x1b\xbc\xd9\x9b\xa1\x8f\x27\xfc\x13\xda\x79\xca\xa5\x73\x74\xb1\x25\x66\x48\xd9\x8d\x6d\x68\x94\xdf\x89\x7d\xd7\xbc\xfa\xd2\x21\x9e\x41\x3e\x
14\xde\x18\x27\x05\xc0\x20\x19\xad\x63\xc3\x16\xde\xe6\xe7\x72\xb9\x1f\xc3\xeb\x79\xbd\x67\x1e\x6a\xa0\x95\x2d\xf7\xc1\xf3\x28\xfd\xfb\x36\xd0\xa3\x69\x15\x48\x74\x0b\x10\xfa\xac\x06\x0d\x0b\xb3\xf6\xbf\x39\x48\xda\x1f\x77\x8d\x2e\xb5\xf6\x08\x87\x2a\x0a\x30\x1b\xcd\xf3\x62\xd1\x13\x3d\x5d\xed\x65\x9b\x48\xd2\xc4\xde\x90\x07\xf6\x4e\x3a\x48\x52\xec\x15\xe4\x6c\xa1\xc9\x3d\x14\x2c\x8e\x10\xd9\x7c\xe6\x81\xb2\x0e\x15\x0f\x32\x63\x18\xb4\x16\xb9\x9a\x67\xe1\xe3\x30\xb2\x43\xb9\x4c\xa0\x84\x3c\x47\xa5\x93\xb6\x91\x19\x61\x9d\x2c\xe7\xe2\xc8\x99\xdf\x1b\x3e\x80\x9b\x44\x63\xb9\xe4\x21\x2b\x67\x17\x3b\x31\x2f\x82\xfb\xaf\x2f\x3b\xf1\xef\xa8\xc6\xc9\xb9\x9d\xba\xb3\xe5\xad\x56\x33\xca\xb5\x2f\xb6\xc1\xfd\x7c\xd2\x6c\x8e\x3e\x08\xf3\x29\xbb\xa6\xff\x4d\x5d\xd2\x58\xec\x4e\x44\xf3\x6e\x16\xe0\x89\x30\x62\xea\x5b\x36\x69\xe8\x7a\x11\x3c\x96\x4e\xe6\x50\x5d\x5c\x1b\xee\xfc\xc6\x23\x32\xe7\x72\xab\x40\xc7\x46\x02\x34\x78\x84\xc8\x1c\x67\x76\xb1\xe2\x02\x5f\xe9\x79\xd8\xbc\x07\x60\xd5\x26\x0f\x0c\x50\x44\x3a\x29\x20\x63\x67\x04\xa9\x22\x76\x68\xa9\x93\x50\x0a\xd4\xd1\x12\xbb\x8a\x77\xbe\xd5\xf9\xbd\x72\x44\xf7\x74\xd2\x98\x9a\x04\x05\xee\x7f\x30\xf8\x65\x9a\xe9\xa3\xe0\x11\x62\x17\xd9\xf4\x58\x95\x0a\xa9\x03\x4d\x92\x45\x97\x79\x66\x44\xb5\x6a\x8c\x61\xbb\x6a\xc4\xc4\x5d\x81\x0e\x16\x53\x18\x76\x2a\xde\x02\x78\x54\x21\x0a\xe5\x73\x80\xbd\x21\x98\x1a\xe0\x45\x24\xde\x28\x1d\xee\x30\xd7\xe8\x1b\xdd\xf3\xd2\x97\xde\xa9\xb7\x8f\x9e\x35\xb6\xd6\x27\xdd\xf6\x38\xd1\x62\x8e\x4d\x90\x80\x52\x88\xec\xbe\x6d\x7f\xed\xfc\x37\x21\x9d\x9e\x4f\xda\xd5\x6a\x58\x5b\x89\x4c\x16\xa3\xd6\x1c\x1d\x48\x34\x4b\xea\xd6\xd4\xca\x89\x1d\x0a\x27\xe6\xa6\x29\xb0\x27\xa2\xff\x44\x5e\x6a\x10\x8b\xe5\xe2\x11\x4b\x54\x24\x76\x3d\x38\xdf\xcf\xf4\xd8\xfb\x9e\x33\x69\x0a\x70\x43\xf5\x1f\xc7\x57\xa5\x51\x15\x3c\x07\x28\xfe\xb3\x86\x9c\x36\x1d\x50\xff\x08\x08\x51\x73\x0d\x2d\x84\xeb\x83\xa8\x71\x48\x71\x8f\x6e\x54\xab\xbb\xf6\x29\xb8\xde\x98\xd7\x6e\xee\x3d\x0d\x0d\xf2\xbc\x63\xea\xe5\x0a\xd2\xd0\x3f\xde\x65\xe2\x
78\xd2\x0a\x02\xd8\xd3\x4c\xca\xe7\x41\xa4\xd9\xef\x96\x05\x47\x44\x6e\x61\x33\x00\xb7\x18\x3a\xef\xeb\xf1\x60\xc0\xf5\x23\x96\x18\xb7\x73\x17\x56\xc5\x04\xab\x3e\x28\x2e\xec\x5e\x48\x8c\x69\xb9\x04\x08\x4e\x83\x2c\x0a\xfe\xe2\xa2\xf0\x7c\x90\xb1\xf4\xda\x87\x44\x93\x7a\x13\x4c\x56\x65\x1c\x87\x02\x2f\x2c\xa7\xd4\x79\xc3\x06\xe4\x99\x63\x32\x07\x60\x0f\x5f\xb4\x41\x46\x76\xad\x9e\x11\x15\xea\x61\xce\xc0\x1a\xcd\x45\x53\x47\x9f\x80\x86\xe9\xa5\xff\x39\x1d\x6d\x01\x83\xe1\x40\x50\x7e\x35\xca\xeb\x43\x8d\x27\x68\x58\x32\xde\x48\x3f\x23\xd3\x82\xe8\x54\x0f\x39\x64\xb2\x52\x44\x54\xea\x1c\x0c\xc0\x5c\xfc\x5b\x46\x62\xe7\xca\x69\x68\x5b\x46\xfb\xf9\x12\x04\x00\xf1\x06\xd2\xa3\xbe\xb7\x3b\x6a\x90\x98\x54\x47\x8b\xd6\xe6\xee\x92\x04\x94\x32\xf0\xd9\x65\x25\x7b\x9b\xcb\x84\xf9\x99\xbc\x47\x44\xf6\xe9\x50\x52\x1d\x09\x03\xc5\xdb\x62\x23\xb5\x60\x38\xce\xd8\xd5\xf8\x0f\xbc\xaf\x42\xc0\xb2\xbd\x5f\x63\xef\x6b\xc7\x12\x08\xa7\x52\x14\xbb\x71\x70\x90\x3a\x8d\x12\xd6\x22\x12\xe3\x0d\x75\x64\x97\x4d\xb3\x2f\x6b\x87\x6c\x38\x00\x6b\x04\x11\x82\x5d\x5a\x4e\x32\x97\xcb\x0b\x76\xaf\xbd\xd5\xf2\xa9\x2a\x0d\xda\xc7\xed\x9e\x6e\xf4\x3f\x4c\x3f\xf1\x2b\x1a\xd8\x2b\x9a\x1b\xb1\xcc\xf9\x8a\x17\x83\x24\x8f\x44\x84\x2a\x27\x29\x32\x7e\x3a\x46\xff\x10\x43\xa1\x11\x63\x08\x4a\x27\x0b\xb3\xa8\xee\xef\x98\xbd\x09\x92\x4c\xad\xb8\xae\x86\x85\x72\xa7\xd4\xc0\x54\x87\x22\x29\x8c\xc1\xe2\xf9\x1f\x29\x69\x2a\x8c\xa8\xde\x9f\xb2\x36\x7e\x61\xcb\x47\x44\xb0\x10\xba\x3c\x72\xa3\x0c\xa3\xf0\x66\x62\x14\x87\xd4\xf3\xc9\x9f\x60\x17\x1f\x5c\x85\xc7\x61\xcc\x15\x4a\xa5\x5c\xcb\x8c\xd5\x29\x80\xd7\xcf\xfc\xb6\x4e\x21\xb8\x2b\x85\x82\xcf\x72\xb9\x51\x3a\xdd\xcb\xaf\x5e\x3a\x20\xc2\x11\x53\x6c\x90\xae\xee\xcf\xe3\x3c\x60\x51\x11\x54\x6f\x17\xa1\x16\x84\xdf\xb5\x7d\xcd\xbd\x72\x9c\xc9\xbb\x4e\x4f\x9f\x3e\x72\x98\xb2\x79\xe7\x56\xfe\x07\x8f\x31\x62\xbd\x11\xeb\xfc\xb7\x7f\xe1\xea\xc3\xee\xd4\xf6\x24\x80\x8c\xb7\x56\x70\x1f\x96\x44\x04\x60\x28\xd3\x20\xe4\x09\xdc\x53\x0c\xba\x4c\x63\xfe\x89\xd9\x03\xf9\x6c\x8d\x58\x
6a\x42\xcb\x19\x60\xf3\x04\x41\x2a\x10\x81\x3b\xfc\x85\x40\x38\x6e\xbb\x4a\xdc\x23\x72\xfc\xfe\x06\x6e\x79\xea\x5a\x1a\x13\xa7\x32\xe2\x7a\xd9\xc8\xec\x9d\x37\xd5\xa5\x1f\x3c\xfe\x4b\x4c\x69\x15\xd3\x39\x8e\xa6\x76\xbf\x8e\x4b\x40\x76\x2f\xcb\xaa\x4d\x39\x43\xee\x57\x42\x61\x73\x45\x3d\x7f\xb3\xc8\x0f\xc0\x69\x94\xdd\x97\xc6\xb8\x97\x0c\x10\x87\x77\x8a\xc5\xe4\xbb\x32\x7a\x75\x76\x55\xcd\x0c\xe5\xff\x14\x7a\xb6\x0f\x7c\x36\x35\x9b\x29\x31\x9c\xf6\x01\xed\x14\xb9\x5a\x09\x0a\xcb\xc2\xa3\xb2\x21\x17\xa6\x83\xa4\xb9\x23\x19\xa1\x4a\xf5\x5a\xab\xaf\x83\x41\xd1\x4b\xeb\x47\x40\x79\x20\xf4\xcf\xe9\xfc\x57\x54\x84\x11\xa7\x04\x83\xe3\x9a\x1c\xa7\x83\x2f\x35\xcb\xe1\x83\x4c\x55\xb8\xfc\x28\xb6\x7b\x6b\xc0\x63\x2f\x2e\xaf\xc1\x73\xe5\x57\x02\x4d\x67\x6e\x9e\xbf\x0b\x8d\x75\xd4\xc6\x36\xe3\x07\x30\x1d\x0b\x25\x32\xbc\x3d\xbe\x69\xbb\x3b\x7f\x20\x85\xc0\xf3\x4a\x03\x18\x63\x78\xeb\x77\xb8\x5d\xef\xeb\x50\x23\xb2\x49\xb3\x8a\x33\x06\x58\x43\x2c\x12\xe3\x41\xb7\x8e\x3c\xf6\x78\x15\x16\x73\x0c\x3a\x96\x8d\x99\xfa\x80\xf0\x71\xbc\x4a\xc5\x1e\x7a\x9d\x83\x92\x2c\x4c\x1e\x8c\xc8\x24\x65\x07\x98\x84\x6c\x98\xbd\x4e\xb0\x58\x59\xce\x22\x5f\x29\xb7\x86\x00\x8b\xaa\x2a\x7b\x47\x4c\x47\xa1\xa1\x48\xec\xb5\x98\xf8\x0a\x6b\xba\x51\x8f\x6e\xb6\x23\xbb\xe3\x51\x1a\x37\xf9\x5a\x11\xfa\xa4\x93\x3a\x11\xcf\x4b\xe0\x7e\xec\x2b\x81\x28\xa3\xfe\x51\x90\x4a\x99\x03\x37\x3f\x9c\xfa\x6d\xfa\x31\x42\xb2\x1b\x45\xb6\x90\xfa\xe7\x44\xfb\x23\x05\xe5\xef\x33\x6d\xee\xfb\xae\xbd\xa4\xd2\x88\xc4\x67\x46\xda\x43\xde\x90\x15\x20\x74\x39\x0d\x7e\x12\xda\x09\xab\xff\xec\x7b\x54\x1e\x33\x7e\x4b\x7b\x1a\xad\x2f\x52\xe8\x76\xa2\x57\xe2\x07\x85\x6a\xc6\x63\xcd\x16\x16\xd4\x1a\xc0\x14\xfe\xb0\x5b\xa6\x9f\x40\x0f\xd8\x04\x76\x68\xb2\x23\x56\x78\x0c\xa9\xea\x3e\x90\x73\x15\xa5\x18\x66\xd4\xf3\xff\x00\x68\xb8\x39\xd2\xc9\x4a\xf5\x99\xa5\x30\xf3\x95\xc7\x18\x1f\x99\x66\x6b\x2c\x80\x49\x9c\x74\x4f\xe1\x8e\xa6\x9d\x9c\x85\xb3\xe9\x3d\x09\x7b\xaa\x47\x22\x1e\x47\xca\xe0\xc3\xba\x8c\x94\x1d\x89\x78\x44\x8e\xe2\x51\x34\xad\x
7a\x2e\xc7\xd6\xb3\xf8\x61\x99\x44\x5a\x01\x4b\x21\x4a\x3a\x7c\xef\xe9\x0f\x87\x54\xc7\x5a\xac\x28\xb2\xef\x02\x67\x80\xca\xa3\x0c\xe6\x22\x1f\x36\xc2\x53\xc4\x24\xd3\x1f\xcd\x4a\x44\xee\xfe\x59\x90\x21\xdb\xc6\x3d\xa0\xa7\x10\x89\x5d\x9d\xce\xfa\x28\xd9\xfc\x38\x95\x1b\xba\x5d\xa0\x53\x9d\xf7\xb1\x41\x7f\x37\x2e\xd8\x99\xbb\x56\x47\xcc\x1e\x37\x4a\xfd\xa4\x85\x7b\xe4\x9a\x14\x2f\x9d\xd5\xba\x90\x0d\xd1\x92\xdf\x42\x91\xdf\x1f\xa9\x42\x26\xa6\x68\x5f\xde\x95\x30\x67\xe6\x4a\x78\x94\xfb\x8c\x6b\xdc\xe8\x72\x07\xcb\xba\xa9\x00\xe8\xa6\x1f\xc9\x37\x79\x62\xc5\x8c\x7a\xfa\x86\xcd\x37\x84\x30\x00\x38\xca\x9c\x1d\x23\x89\x23\x3a\x88\x21\xa8\x34\x26\x09\xa8\x20\xd6\xc8\x3c\x64\xae\xdc\x0a\x17\x89\x22\xf6\x75\x72\x68\x1c\x7f\x7c\xd2\xd0\x8a\x8b\xd2\xc2\xf1\x97\x3f\xf6\x3c\x3b\x86\x8f\xcc\x51\x88\x1d\x8f\x5b\x3c\x54\x64\x0d\x0c\xeb\x05\x85\x1e\x26\x39\x22\x39\x8e\xb4\xd4\x9b\x35\x1a\x57\x2c\x5f\x6c\x1d\x6d\xfe\xa3\x2c\x9e\x61\x84\x13\x1e\x9b\xe9\x2e\x20\x4e\xc9\x87\xab\xad\x85\x73\xd4\x56\xdb\x49\x96\x17\xcd\x6c\xdb\x77\xaa\xac\x50\x82\xa9\x79\x5f\x0a\x74\xe4\x6d\x83\x19\x24\x84\x86\x0e\x95\x93\x46\xe9\xe1\x7c\xeb\xe4\x00\x9d\x9e\xda\xa8\xf6\xd0\x18\xf8\x4c\x1f\x54\xc1\xa7\x4b\x13\x6f\xb8\xf9\xd4\xb9\x22\x2d\xce\x62\x96\x2e\xc8\x23\x64\xd8\xb1\xf9\x21\x7e\x57\x34\x7f\x17\xca\x69\x62\x45\xba\xdf\x10\xf3\xd9\x2a\xf1\xa3\xc1\xba\xa3\x83\xca\xed\xfe\x7d\x80\xad\xe0\xf2\x77\x94\x03\xcd\x20\x53\x78\x77\xce\x50\x21\x86\x5c\x07\x8d\x0b\xb4\xcb\x84\x78\x27\x01\xc3\x29\x68\x0d\xff\x6f\x2a\xfc\x86\x2a\x4e\xf5\xda\x64\xac\x08\xaa\x80\x4a\x89\xcb\x40\x60\xb0\xe1\xaf\x4f\x17\xe0\x57\xcb\xcd\xd2\x14\x2f\xc5\xfd\x25\x69\xa2\x39\xbb\xdb\x60\xa7\x55\x28\x1e\x78\x64\x67\x9d\x91\x0f\x50\x2f\xdf\xf6\xa1\xc0\x12\xa9\xca\x3a\xee\xb7\xf3\x82\x54\xac\x1d\xf7\x34\x7b\x38\xf9\xf7\x60\xf7\xd2\xda\x5a\x2f\x60\xbe\x80\x22\x00\xa8\x7a\x25\xda\x3d\xf2\x93\x74\x00\x86\xbd\x67\x4c\x1a\x5f\x96\x57\x2c\x22\xab\x6e\x65\x86\x27\x50\x9a\xab\xaf\xe3\x8a\x9c\x15\xdb\x74\xf5\x20\xe5\x51\x0e\x57\x62\x77\xfa\x25\x9b\x
53\x10\xc9\xda\x82\xfd\x0d\xd3\x88\xdf\xf3\xb1\x31\x0e\x94\x72\xf7\xff\xb9\x25\x4c\xc2\xfc\x64\xd1\xd2\xe6\x64\x4f\x4a\xd8\x0c\x25\x86\xd7\xdf\x0b\x7e\x1d\xe7\x48\x03\xb5\xb3\x16\x66\x0e\xfb\x6c\x9e\x72\xf5\xde\x9c\x1b\xf2\xff\x3c\x94\xb9\x90\xef\xa6\x56\x75\xc0\x85\xd7\xc4\x12\xe9\xf3\xea\xb6\xde\xd9\x9e\x0f\x8e\x83\xc5\xca\xec\xca\xfa\xeb\x0d\x21\x8b\x5b\xc7\x9c\x57\x83\xc1\x7a\x86\x52\x14\x44\x33\xdb\xae\x0e\xda\xe6\x61\xf8\xb8\x89\x12\xfe\x21\x0e\x2a\x6c\x1e\x14\x8b\x5b\xef\x4a\x00\x6f\xec\xd0\xa9\x7e\x41\x3c\xc6\xa1\x87\xe9\x85\xd4\x2c\x15\x92\x80\x38\x7b\x01\xb6\x88\xe0\xd2\xbd\x31\x24\x6f\x27\xc3\x00\x98\x31\x97\x9b\xce\x0a\x75\x39\x89\xef\xee\xfd\x1f\x5b\x6f\x8b\x25\xca\xd5\x4b\xbb\xd6\xac\xbf\x53\x6a\xab\x79\x38\x2d\xcf\xa7\x39\xcb\x81\x2d\xba\x81\xef\x6e\xfe\x01\xdd\x64\xaa\x1e\x00\x97\xc3\x80\xba\xc9\x53\xbe\xc9\xd6\xf9\x8d\x09\x82\x52\x13\x3b\xc2\xed\x3c\xe0\xdb\x8b\xeb\xef\x05\xa4\x6b\x99\x81\xc9\xf7\x32\x83\x76\x77\xdb\xc7\x12\x79\x13\xb9\x4b\x79\x69\x46\x45\xce\x39\x62\xdc\x86\x1f\x92\x33\xce\x94\x42\x35\x8b\x2a\xdb\x0e\x51\x68\x26\xb8\xdf\x0b\xb9\x73\xb7\x59\xf7\xf5\xb7\x76\xb5\x1a\x2b\xab\x3c\xfa\x63\xe2\x41\x26\x83\x1b\x12\x10\x42\xcd\x66\x22\x53\x70\xb0\xcb\x46\xa2\x0b\x2c\x71\xf9\xd4\x4c\xe3\xdc\x6e\x11\xd5\xdb\x22\xfc\x5a\x91\x2a\xf5\xad\x53\xc1\xc8\xe5\x29\xff\xcd\x16\x32\x40\x2a\x15\x44\x75\x88\xc5\x8f\x0e\x00\x05\xfd\x80\xfc\x3c\x5b\xd5\xf3\xf3\x0d\x64\xa6\x58\x05\xa8\xf3\xff\x7d\x33\x45\x96\xb3\x03\xb5\x6d\xd4\x47\x03\x1d\x9a\x03\x2c\x89\x2c\x02\x55\x1f\x31\x80\x99\xf2\x13\x10\xa1\x27\x54\x15\xde\x09\x5a\x9e\x16\xc2\x92\x08\x18\xab\xa4\x4c\xc7\x4f\x7c\x88\xd9\xea\xa4\xfc\xc5\x6f\x2b\x5a\x1a\x74\x4a\xcb\x3a\x7b\x61\xaa\x7b\xa0\x16\x6b\x32\x6e\x74\x1b\x82\xe6\x9e\x43\xe4\x05\x53\x93\x2d\xce\x54\x57\xdf\xf0\xbe\x5d\x0a\xd8\xba\x4d\xda\xcd\xb0\x65\xf7\x5d\x0c\x68\x81\x74\x0c\x2c\x64\xc8\x9e\xf2\xf8\x0c\x7a\xf5\x99\xee\x17\x80\x4e\x35\x2c\x2d\xc7\x14\x00\x2e\x84\x51\xb8\xe1\x13\x4b\x0d\xa3\x21\x9d\x95\xce\x9a\x1c\x30\x02\x50\x51\x0b\x4a\x4e\xf7\x
20\x2f\xaa\x1e\x5c\x7f\x4c\xba\x5a\x12\x9f\x3b\x59\x6d\x1e\x16\xa1\x2f\x66\x2c\x6a\x2c\xde\xd8\x8a\xaf\x8d\x57\x77\x49\xbf\x81\x30\xba\x8c\x1e\x9e\x5a\x48\x5c\xf8\xd8\xc3\x96\x2f\x13\xc3\x76\x57\x82\x5d\x2e\xaf\xb6\x7b\x94\x9f\x6a\x62\x69\x4c\xe1\x3c\x81\xdc\x82\x5b\x2f\x2e\xa6\x90\x1f\x90\xec\xb6\x6b\x15\x0d\x30\xff\x74\x7f\xd0\x43\x7f\x41\x6c\x33\x48\xf1\xc4\xa5\xd4\x9d\x1f\xa4\xa1\xb3\xde\x93\xab\x50\xe5\x11\xe7\x5a\xee\x55\x31\x43\x02\x6f\x4b\x88\x41\x63\x0a\x21\x79\xda\xf5\x26\xf6\x23\x47\x7e\xc5\x62\x5c\xa3\x60\x5e\xae\x83\x47\x3a\x35\x03\x13\x52\xb4\x67\x60\x7c\x0f\x17\x31\x01\x98\x8c\x44\x0b\x4e\xfe\x28\x78\x2b\x0d\x77\xe4\x98\x64\x4a\x70\x2e\x41\xe8\x56\x9b\x1f\x29\x0a\xfd\x36\x6f\xe4\x8f\xda\x96\x2c\x6f\x47\x44\x72\x51\x31\xcf\x61\x51\x85\x59\x76\x1e\xa5\xce\x1e\x29\xf2\xf6\x9a\x19\xc1\x49\x60\x7c\xc3\x44\x34\x02\x06\x3a\x97\xc0\xc5\x1e\x8a\xe8\xdb\x98\xe3\x9f\x7b\x6a\xe6\x67\x77\x80\xc6\xaa\x85\x3f\x83\xb9\x88\xff\x2c\x2b\xe8\xb6\x78\x37\xdd\x0b\x32\xed\x9c\x86\x47\x4c\x9d\x89\x92\xd7\x01\xf8\xa6\x30\x6c\xf1\x8a\xbf\x60\xd2\xdf\xe7\x66\xe6\x9f\x41\x3a\x56\xe6\x85\x7c\x6f\xff\x1f\xb4\x8d\x6a\x63\xa3\xd2\xd2\x37\x0f\x75\x0d\x34\xb3\x29\x94\x23\xde\x4e\x3b\xe0\x8f\x97\x92\xb6\xeb\x76\x9d\x49\xb9\x33\x70\x26\xf0\xa1\x99\xc1\x24\x80\x31\x78\x8b\x27\x3c\x35\x44\xeb\xb0\x3d\xa3\x1e\xa9\xd9\xeb\x6c\x9a\x20\x8b\xbe\x27\xa0\xf5\x01\x39\x09\xd4\xd4\xcd\xf3\xbd\xe3\xed\xf6\x75\x18\xda\x58\x1a\xb0\x99\x6e\x8b\x8a\xb7\x40\xfc\xc9\x38\x0f\x15\x14\x6e\xa1\xbc\xb9\xa4\x2b\x7f\xee\x05\xd4\x00\x82\x45\x80\x00\x80\x2e\x16\xca\x90\xbf\xf4\x67\x8b\xcd\x24\xec\x97\x76\x3b\x73\x90\x6d\x56\xb2\x60\xcd\x77\x48\x08\x75\xce\xb9\xf9\x9c\xa4\x8a\xf3\xba\x40\xe2\xdf\x1a\x6a\xf5\xaa\xbc\xbd\x93\x28\x95\x9c\x2a\xc6\x2a\x09\xdf\x09\x61\xa3\x18\x7d\x42\x4d\xe5\xee\x6f\x3a\xf8\xe5\xec\x6f\x05\xb9\x22\xe6\xe0\x50\x26\xfe\xfb\x52\xaa\xce\xc5\xd4\x73\xa9\xdb\xab\xbb\x90\x7f\xc9\xa7\x97\x35\x16\xf7\xf1\x0a\xe2\xb4\x99\x85\x69\xc3\xc5\x93\xae\x80\xbc\xb2\xd2\x6c\xf4\x51\xf9\x5e\x0e\xce\x1e\x28\x
ac\x14\xaf\xc2\x76\x74\x34\x1d\xc1\xa1\x16\xf3\x61\x74\xe7\x98\x30\x37\x20\x6a\x0d\xed\x36\xb3\x41\x74\xa5\x86\xc8\x12\x33\xad\xec\xa3\xb2\x7f\xfd\xd3\xdb\x0c\x67\xb3\xf2\x2c\x21\x0c\x63\xed\xc3\x7c\x95\x9a\xf1\x9c\xd8\xcd\xc9\x4b\x62\xc8\x55\xba\x8d\xdf\xf2\xfd\x0d\x0d\x21\xda\x44\xdb\x14\x43\x25\xcf\x64\xd1\x61\x49\x3a\x60\xdf\x2b\x84\x2a\x5f\x26\x1c\x3d\x66\x25\x0a\x4f\xc9\xf1\x4a\x0d\xa5\x62\x36\xa1\x96\xfb\x91\x3e\x8b\x53\xbe\xa7\x2a\x49\xa4\x50\xdd\x81\x93\x94\x0e\x1a\xb8\xe6\x25\xfc\x7e\xfa\x48\x20\x34\xd2\x42\xdd\x43\x8c\x93\x9b\xbf\x01\xda\x4b\x62\x08\x8f\x86\xc9\x64\x0b\x15\xf1\x23\x18\xb4\x9b\xe2\x75\xea\x01\x60\xe9\x44\x80\xd9\xdc\x7d\x00\xff\x9d\x6d\x4c\x84\xcb\x6a\x94\x96\xea\x2e\xc1\x94\xee\x44\x7d\x84\x5e\xed\x46\xcf\x06\x2d\xbd\xbe\x0f\xe6\xdd\x70\x94\x95\x9c\x17\x91\x7e\xb6\x67\xe6\x22\xc3\x09\x55\x9d\x9b\xab\x6a\x07\xbc\x25\x50\x16\x11\x64\xa0\x5b\x7c\x54\x1c\xbe\xe6\xc4\xd5\xe2\x68\xd8\xb8\x01\x90\xce\x76\x5a\x6f\x79\x59\x92\xd4\xb8\x2b\xa2\x20\x80\x6e\x92\xc7\x94\x50\xe5\x07\x2f\x14\xdd\x0c\xae\x58\x22\x31\x6e\x6a\x62\xe2\xd9\xfc\x33\xa5\xd5\x43\xbb\xfb\x67\x51\x47\xa5\x7f\x03\x43\xb8\x11\x4b\x1c\xbc\x81\x11\x64\xd7\xfd\x20\x2a\x98\xb4\x05\xaa\x6f\x07\x93\xc2\x51\xd9\x22\xfb\xb7\xcb\xfe\xf4\x4e\x39\xee\xeb\x00\x11\x01\x60\xbe\xdf\x98\x96\x05\x5f\x8f\x75\xdf\xbf\xf3\xdc\x82\x0c\x37\xc8\xe8\x8e\xe0\xc3\xc4\xe5\x1b\x79\x18\x65\x29\x78\x88\x65\xf2\xed\x66\xbb\x1c\x99\x33\x4c\x31\x75\x58\xaa\x5f\xd7\x53\xed\xd2\x8b\x60\x9f\x00\x27\x31\x64\x2a\x9b\x5f\x71\xc0\xd9\x7c\x99\xb4\x24\x19\x27\xa9\xd4\xee\xc3\xe5\xdb\xa4\xcd\xfe\x2f\x80\xb3\xcf\xa6\x67\x7b\x63\x53\x21\x7a\x84\xb8\x2f\x8f\x53\xe3\xd7\x05\x75\x58\x44\x09\x2f\x03\xb5\xf3\x93\x79\x6c\xe7\xcf\x83\x05\xc0\xa9\x94\xcb\xd1\xd3\x0f\x75\x49\x61\xd9\xd3\xf3\x73\x6b\xb7\xc0\xf0\x28\xf9\xbb\xdc\xad\x2a\xb3\xa9\x2a\x1e\xb5\xd7\x9e\x18\xca\x81\x3c\xb9\x0e\x54\x7b\x79\x9f\x1c\x54\x5c\x67\x77\x7d\x2c\x78\x41\xaa\x7c\x4a\xa7\xbc\xbe\x59\xb1\x13\xff\x83\xfd\x56\x52\x3c\xd1\xff\xcf\xcf\x97\x44\x10\x8a\xa6\x57\x
7a\x5a\xce\x85\xcc\xb7\x2f\x02\xa0\x77\x4e\x0f\xbc\x47\x0d\x9b\x3b\x8b\x74\x40\xad\x1e\xda\x84\x53\x29\x80\x7c\x79\xee\xdc\x7f\xd5\xa9\xfd\xe5\xe3\x0f\xbb\x14\xa6\xa6\x25\x52\x41\x09\x55\xee\x3f\xcf\xf5\xab\x04\x12\xf9\x0f\xc8\x78\xe5\x89\x72\x06\x33\x4e\x47\xc7\xa8\xe7\x45\xd5\xa7\x64\x21\x50\xb6\xee\xef\x5f\x59\xff\x1c\xa0\x02\x07\x5c\x9f\x2c\x61\xbd\x23\xb1\x3d\x20\xdd\xbd\x15\x12\x4e\x34\x56\x70\x42\xab\x73\xdc\xa4\xe9\xc2', 2)
| 11,497.666667
| 34,435
| 0.75021
| 8,611
| 34,493
| 3.003484
| 0.030775
| 0.004408
| 0.004524
| 0.003712
| 0.001508
| 0.000928
| 0.000928
| 0
| 0
| 0
| 0
| 0.310947
| 0.000232
| 34,493
| 3
| 34,435
| 11,497.666667
| 0.439031
| 0
| 0
| 0
| 0
| 0.333333
| 0.997159
| 0.997159
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
a758345cf14f1bf8b081f69a5cb95a00bf6ddc1b
| 7,991
|
py
|
Python
|
Stcgan_net.py
|
Vivianyzw/DeshadowGANs
|
1b2dc4149bfa640f0b2ac9b4f9d0e3f2bc8737a3
|
[
"Apache-2.0"
] | 30
|
2019-02-08T14:48:37.000Z
|
2021-09-11T05:39:43.000Z
|
Stcgan_net.py
|
Vivianyzw/DeshadowGANs
|
1b2dc4149bfa640f0b2ac9b4f9d0e3f2bc8737a3
|
[
"Apache-2.0"
] | 4
|
2018-07-26T12:45:38.000Z
|
2020-07-08T08:55:18.000Z
|
Stcgan_net.py
|
Vivianyzw/DeshadowGANs
|
1b2dc4149bfa640f0b2ac9b4f9d0e3f2bc8737a3
|
[
"Apache-2.0"
] | 16
|
2018-07-18T08:40:53.000Z
|
2020-04-08T00:03:18.000Z
|
import torch
import torch.nn as nn
class Generator_first(nn.Module):
    """First-stage STC-GAN generator: 3-channel RGB image -> 1-channel map in [-1, 1].

    Encoder: 3x3 stride-1 convs (3->64->128->256->512->512) with BatchNorm +
    LeakyReLU (conv5 uses plain ReLU, no BatchNorm). Decoder: transposed convs
    with U-Net-style skip connections (``torch.cat`` with encoder activations
    along the channel dim), ending in Tanh. Every conv uses kernel 3, stride 1,
    padding 1, so spatial size is preserved end to end.
    """

    def __init__(self):
        super(Generator_first, self).__init__()
        self.conv0 = nn.Sequential(
            nn.Conv2d(3, 64, 3, 1, 1),
            nn.LeakyReLU(),
        )
        self.conv1 = nn.Sequential(
            nn.Conv2d(64, 128, 3, 1, 1),
            nn.BatchNorm2d(128),
            nn.LeakyReLU()
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(128, 256, 3, 1, 1),
            nn.BatchNorm2d(256),
            nn.LeakyReLU()
        )
        self.conv3 = nn.Sequential(
            nn.Conv2d(256, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.LeakyReLU()
        )
        self.conv4 = nn.Sequential(
            nn.Conv2d(512, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.LeakyReLU()
        )
        # Bottleneck: no BatchNorm, plain ReLU.
        self.conv5 = nn.Sequential(
            nn.Conv2d(512, 512, 3, 1, 1),
            nn.ReLU()
        )
        self.convt6 = nn.Sequential(
            nn.ConvTranspose2d(512, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        # convt7 takes 1024 channels: a 512-channel skip concatenated with a
        # 512-channel decoder activation.
        self.convt7 = nn.Sequential(
            nn.ConvTranspose2d(1024, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        self.convt8 = nn.Sequential(
            nn.ConvTranspose2d(1024, 256, 3, 1, 1),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )
        self.convt9 = nn.Sequential(
            nn.ConvTranspose2d(512, 128, 3, 1, 1),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        self.convt10 = nn.Sequential(
            nn.ConvTranspose2d(256, 64, 3, 1, 1),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        self.convt11 = nn.Sequential(
            nn.ConvTranspose2d(128, 1, 3, 1, 1),
            nn.Tanh()
        )
        self._initialize_weights()

    def forward(self, input):
        """Run the encoder, then decode with skip connections.

        :param input: 4-D tensor with 3 channels (N, 3, H, W).
        :return: tensor of shape (N, 1, H, W) with values in [-1, 1].
        """
        conv0 = self.conv0(input)
        conv1 = self.conv1(conv0)
        conv2 = self.conv2(conv1)
        conv3 = self.conv3(conv2)
        conv4 = self.conv4(conv3)
        # NOTE(review): conv4 is applied three times with shared weights, and
        # convt7 is likewise applied three times below, re-concatenating conv4
        # each round. This looks like deliberate weight sharing / iterative
        # refinement, so it is preserved as-is — confirm against the original
        # DeshadowGANs design.
        conv4 = self.conv4(conv4)
        conv4 = self.conv4(conv4)
        conv5 = self.conv5(conv4)
        convt6 = self.convt6(conv5)
        conv6 = torch.cat((conv4, convt6), 1)
        convt7 = self.convt7(conv6)
        conv6 = torch.cat((conv4, convt7), 1)
        convt7 = self.convt7(conv6)
        conv6 = torch.cat((conv4, convt7), 1)
        convt7 = self.convt7(conv6)
        conv7 = torch.cat((conv3, convt7), 1)
        convt8 = self.convt8(conv7)
        conv8 = torch.cat((conv2, convt8), 1)
        convt9 = self.convt9(conv8)
        conv9 = torch.cat((conv1, convt9), 1)
        convt10 = self.convt10(conv9)
        conv10 = torch.cat((conv0, convt10), 1)
        convt11 = self.convt11(conv10)
        return convt11

    def _initialize_weights(self):
        """DCGAN-style init: weights ~ N(0, 0.02), biases = 0.1.

        Bug fix: the original matched only ``nn.Conv2d``, so every
        ``nn.ConvTranspose2d`` decoder layer silently kept PyTorch's default
        initialization. Both conv types are now initialized consistently.
        """
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                torch.nn.init.normal_(m.weight, mean=0, std=0.02)
                torch.nn.init.constant_(m.bias, 0.1)
class Generator_second(nn.Module):
    """Second-stage STC-GAN generator: 4-channel input -> 3-channel image in [-1, 1].

    Same U-Net-style topology as ``Generator_first`` but with a 4-channel
    input (presumably RGB image + first-stage mask — confirm against callers)
    and a 3-channel Tanh output. All convs are kernel 3, stride 1, padding 1,
    so spatial size is preserved.
    """

    def __init__(self):
        super(Generator_second, self).__init__()
        self.conv0 = nn.Sequential(
            nn.Conv2d(4, 64, 3, 1, 1),
            nn.LeakyReLU(),
        )
        self.conv1 = nn.Sequential(
            nn.Conv2d(64, 128, 3, 1, 1),
            nn.BatchNorm2d(128),
            nn.LeakyReLU()
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(128, 256, 3, 1, 1),
            nn.BatchNorm2d(256),
            nn.LeakyReLU()
        )
        self.conv3 = nn.Sequential(
            nn.Conv2d(256, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.LeakyReLU()
        )
        self.conv4 = nn.Sequential(
            nn.Conv2d(512, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.LeakyReLU()
        )
        # Bottleneck: no BatchNorm, plain ReLU.
        self.conv5 = nn.Sequential(
            nn.Conv2d(512, 512, 3, 1, 1),
            nn.ReLU()
        )
        self.convt6 = nn.Sequential(
            nn.ConvTranspose2d(512, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        # convt7 takes 1024 channels: a 512-channel skip concatenated with a
        # 512-channel decoder activation.
        self.convt7 = nn.Sequential(
            nn.ConvTranspose2d(1024, 512, 3, 1, 1),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        self.convt8 = nn.Sequential(
            nn.ConvTranspose2d(1024, 256, 3, 1, 1),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )
        self.convt9 = nn.Sequential(
            nn.ConvTranspose2d(512, 128, 3, 1, 1),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        self.convt10 = nn.Sequential(
            nn.ConvTranspose2d(256, 64, 3, 1, 1),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        self.convt11 = nn.Sequential(
            nn.ConvTranspose2d(128, 3, 3, 1, 1),
            nn.Tanh()
        )
        self._initialize_weights()

    def forward(self, input):
        """Run the encoder, then decode with skip connections.

        :param input: 4-D tensor with 4 channels (N, 4, H, W).
        :return: tensor of shape (N, 3, H, W) with values in [-1, 1].
        """
        conv0 = self.conv0(input)
        conv1 = self.conv1(conv0)
        conv2 = self.conv2(conv1)
        conv3 = self.conv3(conv2)
        conv4 = self.conv4(conv3)
        # NOTE(review): conv4 and convt7 are each applied three times with
        # shared weights, mirroring Generator_first. Preserved as-is — confirm
        # against the original DeshadowGANs design.
        conv4 = self.conv4(conv4)
        conv4 = self.conv4(conv4)
        conv5 = self.conv5(conv4)
        convt6 = self.convt6(conv5)
        conv6 = torch.cat((conv4, convt6), 1)
        convt7 = self.convt7(conv6)
        conv6 = torch.cat((conv4, convt7), 1)
        convt7 = self.convt7(conv6)
        conv6 = torch.cat((conv4, convt7), 1)
        convt7 = self.convt7(conv6)
        conv7 = torch.cat((conv3, convt7), 1)
        convt8 = self.convt8(conv7)
        conv8 = torch.cat((conv2, convt8), 1)
        convt9 = self.convt9(conv8)
        conv9 = torch.cat((conv1, convt9), 1)
        convt10 = self.convt10(conv9)
        conv10 = torch.cat((conv0, convt10), 1)
        convt11 = self.convt11(conv10)
        return convt11

    def _initialize_weights(self):
        """DCGAN-style init: weights ~ N(0, 0.02), biases = 0.1.

        Bug fix: the original matched only ``nn.Conv2d``, so every
        ``nn.ConvTranspose2d`` decoder layer silently kept PyTorch's default
        initialization. Both conv types are now initialized consistently.
        """
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                torch.nn.init.normal_(m.weight, mean=0, std=0.02)
                torch.nn.init.constant_(m.bias, 0.1)
class Discriminator_first(nn.Module):
    """First-stage discriminator: 4-channel input -> per-pixel score map in (0, 1).

    Five 3x3 stride-1 padding-1 convs (4->64->128->256->512->1); the three
    middle stages add BatchNorm, every hidden stage uses LeakyReLU, and the
    final conv is squashed by a Sigmoid. Spatial size is preserved throughout.
    """

    def __init__(self):
        super(Discriminator_first, self).__init__()
        stages = [nn.Conv2d(4, 64, 3, 1, 1), nn.LeakyReLU()]
        for in_ch, out_ch in ((64, 128), (128, 256), (256, 512)):
            stages.extend([
                nn.Conv2d(in_ch, out_ch, 3, 1, 1),
                nn.BatchNorm2d(out_ch),
                nn.LeakyReLU(),
            ])
        stages.extend([nn.Conv2d(512, 1, 3, 1, 1), nn.Sigmoid()])
        self.feature = nn.Sequential(*stages)
        self._initialize_weights()

    def forward(self, input):
        """Return the per-pixel real/fake score map for *input*."""
        return self.feature(input)

    def _initialize_weights(self):
        """DCGAN-style init: conv weights ~ N(0, 0.02), biases = 0.1."""
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                torch.nn.init.normal_(module.weight, mean=0, std=0.02)
                torch.nn.init.constant_(module.bias, 0.1)
class Discriminator_second(nn.Module):
    """Second-stage discriminator: 7-channel input -> per-pixel score map in (0, 1).

    Identical topology to ``Discriminator_first`` except for the 7-channel
    input: five 3x3 stride-1 padding-1 convs (7->64->128->256->512->1) with
    BatchNorm on the middle stages, LeakyReLU activations, and a final
    Sigmoid. Spatial size is preserved throughout.
    """

    def __init__(self):
        super(Discriminator_second, self).__init__()
        stages = [nn.Conv2d(7, 64, 3, 1, 1), nn.LeakyReLU()]
        for in_ch, out_ch in ((64, 128), (128, 256), (256, 512)):
            stages.extend([
                nn.Conv2d(in_ch, out_ch, 3, 1, 1),
                nn.BatchNorm2d(out_ch),
                nn.LeakyReLU(),
            ])
        stages.extend([nn.Conv2d(512, 1, 3, 1, 1), nn.Sigmoid()])
        self.feature = nn.Sequential(*stages)
        self._initialize_weights()

    def forward(self, input):
        """Return the per-pixel real/fake score map for *input*."""
        return self.feature(input)

    def _initialize_weights(self):
        """DCGAN-style init: conv weights ~ N(0, 0.02), biases = 0.1."""
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                torch.nn.init.normal_(module.weight, mean=0, std=0.02)
                torch.nn.init.constant_(module.bias, 0.1)
| 30.616858
| 65
| 0.504818
| 929
| 7,991
| 4.273412
| 0.07535
| 0.017128
| 0.025693
| 0.042821
| 0.980856
| 0.980605
| 0.980605
| 0.939798
| 0.911587
| 0.911587
| 0
| 0.124779
| 0.362157
| 7,991
| 260
| 66
| 30.734615
| 0.65411
| 0
| 0
| 0.831933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05042
| false
| 0
| 0.008403
| 0
| 0.092437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a76678e8143e875880508a59f3c206addca5b1a8
| 80,678
|
py
|
Python
|
tests/conftest.py
|
VolumeFi/exchange-add
|
0021a55557cbce43f31b21078a44f62fb14e0d56
|
[
"Apache-2.0"
] | 5
|
2021-03-23T21:09:06.000Z
|
2021-07-06T19:29:38.000Z
|
tests/conftest.py
|
VolumeFi/exchange-add
|
0021a55557cbce43f31b21078a44f62fb14e0d56
|
[
"Apache-2.0"
] | 4
|
2021-02-08T17:08:06.000Z
|
2021-02-24T04:59:50.000Z
|
tests/conftest.py
|
VolumeFi/exchange-liquidity
|
0021a55557cbce43f31b21078a44f62fb14e0d56
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
import pytest, math
from brownie import accounts, UniswapExchangeAdd, UniswapExchangeRemove, CurveExchangeAdd, CurveExchangeRemove, SushiswapExchangeAdd, SushiswapExchangeRemove, BalancerExchangeAdd, BalancerExchangeRemove, UniswapV3ExchangeAdd, UniswapV3ExchangeRemove, Contract
@pytest.fixture(scope="session")
def MyUniswapExchangeAdd():
    """Deploy a UniswapExchangeAdd contract once per test session."""
    deployer = accounts[0]
    return UniswapExchangeAdd.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyUniswapExchangeRemove():
    """Deploy a UniswapExchangeRemove contract once per test session."""
    deployer = accounts[0]
    return UniswapExchangeRemove.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyUniswapV3ExchangeAdd():
    """Deploy a UniswapV3ExchangeAdd contract once per test session."""
    deployer = accounts[0]
    return UniswapV3ExchangeAdd.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyUniswapV3ExchangeRemove():
    """Deploy a UniswapV3ExchangeRemove contract once per test session."""
    deployer = accounts[0]
    return UniswapV3ExchangeRemove.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyCurveExchangeAdd():
    """Deploy a CurveExchangeAdd contract once per test session."""
    deployer = accounts[0]
    return CurveExchangeAdd.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyCurveExchangeRemove():
    """Deploy a CurveExchangeRemove contract once per test session."""
    deployer = accounts[0]
    return CurveExchangeRemove.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MySushiswapExchangeAdd():
    """Deploy a SushiswapExchangeAdd contract once per test session."""
    deployer = accounts[0]
    return SushiswapExchangeAdd.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MySushiswapExchangeRemove():
    """Deploy a SushiswapExchangeRemove contract once per test session."""
    deployer = accounts[0]
    return SushiswapExchangeRemove.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyBalancerExchangeAdd():
    """Deploy a BalancerExchangeAdd contract once per test session."""
    deployer = accounts[0]
    return BalancerExchangeAdd.deploy({'from': deployer})
@pytest.fixture(scope="session")
def MyBalancerExchangeRemove():
    """Deploy a BalancerExchangeRemove contract once per test session."""
    deployer = accounts[0]
    return BalancerExchangeRemove.deploy({'from': deployer})
@pytest.fixture(scope="session")
def UniswapV2Factory():
    """Session-scoped wrapper around the deployed UniswapV2Factory contract at
    0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f, built from an inline ABI so no
    on-chain ABI lookup is needed. NOTE(review): assumes tests run against a
    mainnet fork where this address exists — confirm in the brownie network config."""
    return Contract.from_abi("UniswapV2Factory", "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f", [{"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"payable":False,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"token0","type":"address"},{"indexed":True,"internalType":"address","name":"token1","type":"address"},{"indexed":False,"internalType":"address","name":"pair","type":"address"},{"indexed":False,"internalType":"uint256","name":"","type":"uint256"}],"name":"PairCreated","type":"event"},{"constant":True,"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"allPairs","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"allPairsLength","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"}],"name":"createPair","outputs":[{"internalType":"address","name":"pair","type":"address"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"feeTo","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"feeToSetter","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"getPair","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"_feeTo","type":"address"}],"name":"setFeeTo","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"name":"setFeeToSetter","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"}])
@pytest.fixture(scope="session")
def UniswapV2Router02():
    """Session-scoped wrapper around the deployed UniswapV2Router02 contract at
    0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D, built from an inline ABI covering
    the add/remove-liquidity, quote/getAmounts*, and swap entry points.
    NOTE(review): assumes a mainnet fork — confirm in the brownie network config."""
    return Contract.from_abi("UniswapV2Router02", "0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D", [{"inputs":[{"internalType":"address","name":"_factory","type":"address"},{"internalType":"address","name":"_WETH","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[],"name":"WETH","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"amountADesired","type":"uint256"},{"internalType":"uint256","name":"amountBDesired","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"amountTokenDesired","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountIn","outputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountOut","outputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsIn","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsOut","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"reserveA","type":"uint256"},{"internalType":"uint256","name":"reserveB","type":"uint256"}],"name":"quote","outputs":[{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidityETHSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityETHWithPermit","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityETHWithPermitSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityWithPermit","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapETHForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETHSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapTokensForExactETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapTokensForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}])
@pytest.fixture(scope="session")
def SushiswapFactory():
    """Session-scoped wrapper around the deployed SushiswapFactory contract at
    0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac, built from an inline ABI.
    NOTE(review): assumes a mainnet fork — confirm in the brownie network config."""
    return Contract.from_abi("SushiswapFactory", "0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac", [{"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"token0","type":"address"},{"indexed":True,"internalType":"address","name":"token1","type":"address"},{"indexed":False,"internalType":"address","name":"pair","type":"address"},{"indexed":False,"internalType":"uint256","name":"","type":"uint256"}],"name":"PairCreated","type":"event"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"allPairs","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"allPairsLength","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"}],"name":"createPair","outputs":[{"internalType":"address","name":"pair","type":"address"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"feeTo","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"feeToSetter","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"getPair","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"migrator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pairCodeHash","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"address","name":"_feeTo","type":"address"}],"name":"setFeeTo","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_feeToSetter","type":"address"}],"name":"setFeeToSetter","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_migrator","type":"address"}],"name":"setMigrator","outputs":[],"stateMutability":"nonpayable","type":"function"}])
@pytest.fixture(scope="session")
def SushiswapRouter():
return Contract.from_abi("SushiswapRouter", "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F", [{"inputs":[{"internalType":"address","name":"_factory","type":"address"},{"internalType":"address","name":"_WETH","type":"address"}],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[],"name":"WETH","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"amountADesired","type":"uint256"},{"internalType":"uint256","name":"amountBDesired","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"amountTokenDesired","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"addLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"},{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"func
tion"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountIn","outputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"reserveIn","type":"uint256"},{"internalType":"uint256","name":"reserveOut","type":"uint256"}],"name":"getAmountOut","outputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsIn","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"}],"name":"getAmountsOut","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"reserveA","type":"uint256"},{"internalType":"uint256","name":"reserveB","type":"uint256"}],"name":"quote","outputs":[{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"pure","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline"
,"type":"uint256"}],"name":"removeLiquidity","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidityETH","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"removeLiquidityETHSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes
32"}],"name":"removeLiquidityETHWithPermit","outputs":[{"internalType":"uint256","name":"amountToken","type":"uint256"},{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountTokenMin","type":"uint256"},{"internalType":"uint256","name":"amountETHMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityETHWithPermitSupportingFeeOnTransferTokens","outputs":[{"internalType":"uint256","name":"amountETH","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"tokenA","type":"address"},{"internalType":"address","name":"tokenB","type":"address"},{"internalType":"uint256","name":"liquidity","type":"uint256"},{"internalType":"uint256","name":"amountAMin","type":"uint256"},{"internalType":"uint256","name":"amountBMin","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"bool","name":"approveMax","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"removeLiquidityWithPermit","outputs":[{"internalType":"uint256","name":"amountA","type":"uint256"},{"internalType":"uint256","name":"amountB","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":
"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapETHForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactETHForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForETHSupportingFeeOnTransferTokens","outputs":[],"stateMutability":
"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountIn","type":"uint256"},{"internalType":"uint256","name":"amountOutMin","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapExactTokensForTokensSupportingFeeOnTransferTokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapTokensForExactETH","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"uint256","name":"amountInMax","type":"uint256"},{"internalType":"address[]","name":"path","type":"address[]"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"deadline","type":"uint256"}],"name":"swapTokensForExactTokens","outputs":[{"internalType":"uint256[]","name":"amounts","type":"uint256[]"}],"stateMutability":"nonpayable","type":"function"},{"stateMutability":"payable","type":"receive"}])
@pytest.fixture(scope="session")
def USDC():
    """Session-scoped fixture: the USDC (FiatToken) contract wrapped via ``Contract.from_abi``.

    Built from a hard-coded inline ABI (ERC-20 plus Centre FiatToken extensions:
    blacklist/pauser/minter roles, ``permit`` and the *WithAuthorization* gas-abstraction
    functions) at address 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48.
    NOTE(review): that address is presumably the canonical mainnet USDC deployment —
    confirm it matches the network the test session is forked from.
    """
    return Contract.from_abi("USDC", "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", [{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"owner","type":"address"},{"indexed":True,"internalType":"address","name":"spender","type":"address"},{"indexed":False,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"authorizer","type":"address"},{"indexed":True,"internalType":"bytes32","name":"nonce","type":"bytes32"}],"name":"AuthorizationCanceled","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"authorizer","type":"address"},{"indexed":True,"internalType":"bytes32","name":"nonce","type":"bytes32"}],"name":"AuthorizationUsed","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"_account","type":"address"}],"name":"Blacklisted","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"newBlacklister","type":"address"}],"name":"BlacklisterChanged","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"burner","type":"address"},{"indexed":False,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Burn","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"newMasterMinter","type":"address"}],"name":"MasterMinterChanged","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"minter","type":"address"},{"indexed":True,"internalType":"address","name":"to","type":"address"},{"indexed":False,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"minter","type":"address"},{"indexed":False,"internalType":"uint256","name":"minterAllowedAmount","type":"uint256"}],"name":"MinterConfigured","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"oldMinter","type":"address"}],"name":"MinterRemoved","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":False,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":False,"inputs":[],"name":"Pause","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"newAddress","type":"address"}],"name":"PauserChanged","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"newRescuer","type":"address"}],"name":"RescuerChanged","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"from","type":"address"},{"indexed":True,"internalType":"address","name":"to","type":"address"},{"indexed":False,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"_account","type":"address"}],"name":"UnBlacklisted","type":"event"},{"anonymous":False,"inputs":[],"name":"Unpause","type":"event"},{"inputs":[],"name":"APPROVE_WITH_AUTHORIZATION_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"CANCEL_AUTHORIZATION_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"DECREASE_ALLOWANCE_WITH_AUTHORIZATION_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"INCREASE_ALLOWANCE_WITH_AUTHORIZATION_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"TRANSFER_WITH_AUTHORIZATION_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"validAfter","type":"uint256"},{"internalType":"uint256","name":"validBefore","type":"uint256"},{"internalType":"bytes32","name":"nonce","type":"bytes32"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"approveWithAuthorization","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"authorizer","type":"address"},{"internalType":"bytes32","name":"nonce","type":"bytes32"}],"name":"authorizationState","outputs":[{"internalType":"enum GasAbstraction.AuthorizationState","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_account","type":"address"}],"name":"blacklist","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"blacklister","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_amount","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"authorizer","type":"address"},{"internalType":"bytes32","name":"nonce","type":"bytes32"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"cancelAuthorization","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"minter","type":"address"},{"internalType":"uint256","name":"minterAllowedAmount","type":"uint256"}],"name":"configureMinter","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"currency","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"decrement","type":"uint256"}],"name":"decreaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"decrement","type":"uint256"},{"internalType":"uint256","name":"validAfter","type":"uint256"},{"internalType":"uint256","name":"validBefore","type":"uint256"},{"internalType":"bytes32","name":"nonce","type":"bytes32"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"decreaseAllowanceWithAuthorization","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"increment","type":"uint256"}],"name":"increaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"increment","type":"uint256"},{"internalType":"uint256","name":"validAfter","type":"uint256"},{"internalType":"uint256","name":"validBefore","type":"uint256"},{"internalType":"bytes32","name":"nonce","type":"bytes32"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"increaseAllowanceWithAuthorization","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"tokenName","type":"string"},{"internalType":"string","name":"tokenSymbol","type":"string"},{"internalType":"string","name":"tokenCurrency","type":"string"},{"internalType":"uint8","name":"tokenDecimals","type":"uint8"},{"internalType":"address","name":"newMasterMinter","type":"address"},{"internalType":"address","name":"newPauser","type":"address"},{"internalType":"address","name":"newBlacklister","type":"address"},{"internalType":"address","name":"newOwner","type":"address"}],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"string","name":"newName","type":"string"}],"name":"initializeV2","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_account","type":"address"}],"name":"isBlacklisted","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"isMinter","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"masterMinter","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_to","type":"address"},{"internalType":"uint256","name":"_amount","type":"uint256"}],"name":"mint","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"minter","type":"address"}],"name":"minterAllowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"pauser","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"minter","type":"address"}],"name":"removeMinter","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"contract IERC20","name":"tokenContract","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"rescueERC20","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rescuer","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"validAfter","type":"uint256"},{"internalType":"uint256","name":"validBefore","type":"uint256"},{"internalType":"bytes32","name":"nonce","type":"bytes32"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"transferWithAuthorization","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_account","type":"address"}],"name":"unBlacklist","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_newBlacklister","type":"address"}],"name":"updateBlacklister","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_newMasterMinter","type":"address"}],"name":"updateMasterMinter","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_newPauser","type":"address"}],"name":"updatePauser","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newRescuer","type":"address"}],"name":"updateRescuer","outputs":[],"stateMutability":"nonpayable","type":"function"}])
@pytest.fixture(scope="session")
def DAI():
    """Session-scoped fixture: the DAI stablecoin contract wrapped via ``Contract.from_abi``.

    Built from a hard-coded inline ABI (MakerDAO-style DAI: ``permit`` with
    holder/expiry/allowed signature, ``wards``/``rely``/``deny`` auth,
    ``pull``/``push``/``move`` helpers) at address
    0x6B175474E89094C44Da98b954EedeAC495271d0F.
    NOTE(review): presumably the mainnet DAI deployment — confirm against the
    network the test session is forked from; see DAI_R for the alternate address.
    """
    return Contract.from_abi("DAI", "0x6B175474E89094C44Da98b954EedeAC495271d0F", [{"inputs":[{"internalType":"uint256","name":"chainId_","type":"uint256"}],"payable":False,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"src","type":"address"},{"indexed":True,"internalType":"address","name":"guy","type":"address"},{"indexed":False,"internalType":"uint256","name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":True,"inputs":[{"indexed":True,"internalType":"bytes4","name":"sig","type":"bytes4"},{"indexed":True,"internalType":"address","name":"usr","type":"address"},{"indexed":True,"internalType":"bytes32","name":"arg1","type":"bytes32"},{"indexed":True,"internalType":"bytes32","name":"arg2","type":"bytes32"},{"indexed":False,"internalType":"bytes","name":"data","type":"bytes"}],"name":"LogNote","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"src","type":"address"},{"indexed":True,"internalType":"address","name":"dst","type":"address"},{"indexed":False,"internalType":"uint256","name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"constant":True,"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"burn","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"guy","type":"address"}],"name":"deny","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"mint","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"src","type":"address"},{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"move","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"holder","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"nonce","type":"uint256"},{"internalType":"uint256","name":"expiry","type":"uint256"},{"internalType":"bool","name":"allowed","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"pull","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"push","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"guy","type":"address"}],"name":"rely","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"src","type":"address"},{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"version","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"wards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"}])
@pytest.fixture(scope="session")
def WETH():
    """Session-scoped fixture: the WETH (wrapped-ether) contract wrapped via ``Contract.from_abi``.

    Built from a hard-coded inline ABI (ERC-20 plus payable ``deposit``,
    ``withdraw`` and a payable fallback — the WETH9 interface shape) at address
    0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2.
    NOTE(review): presumably the mainnet WETH9 deployment — confirm against the
    network the test session is forked from; see WETH_R for the alternate address.
    """
    return Contract.from_abi("WETH", "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", [{"constant":True,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[],"name":"deposit","outputs":[],"payable":True,"stateMutability":"payable","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"payable":True,"stateMutability":"payable","type":"fallback"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":True,"name":"guy","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":True,"name":"dst","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"dst","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}])
@pytest.fixture(scope="session")
def DAI_R():
    """Session-scoped fixture: DAI at an alternate-network address, wrapped via ``Contract.from_abi``.

    Uses the same inline MakerDAO-style DAI ABI as the ``DAI`` fixture but a
    different address, 0xc7AD46e0b8a400Bb3C915120d284AafbA8fc4735; the contract
    is still registered under the name "DAI".
    NOTE(review): the ``_R`` suffix and address suggest a testnet (Rinkeby?)
    deployment — confirm which network this fixture targets.
    """
    return Contract.from_abi("DAI", "0xc7AD46e0b8a400Bb3C915120d284AafbA8fc4735", [{"inputs":[{"internalType":"uint256","name":"chainId_","type":"uint256"}],"payable":False,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"src","type":"address"},{"indexed":True,"internalType":"address","name":"guy","type":"address"},{"indexed":False,"internalType":"uint256","name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":True,"inputs":[{"indexed":True,"internalType":"bytes4","name":"sig","type":"bytes4"},{"indexed":True,"internalType":"address","name":"usr","type":"address"},{"indexed":True,"internalType":"bytes32","name":"arg1","type":"bytes32"},{"indexed":True,"internalType":"bytes32","name":"arg2","type":"bytes32"},{"indexed":False,"internalType":"bytes","name":"data","type":"bytes"}],"name":"LogNote","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"src","type":"address"},{"indexed":True,"internalType":"address","name":"dst","type":"address"},{"indexed":False,"internalType":"uint256","name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"constant":True,"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"burn","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"guy","type":"address"}],"name":"deny","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"mint","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"src","type":"address"},{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"move","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"holder","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"nonce","type":"uint256"},{"internalType":"uint256","name":"expiry","type":"uint256"},{"internalType":"bool","name":"allowed","type":"bool"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"pull","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"usr","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"push","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"guy","type":"address"}],"name":"rely","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"internalType":"address","name":"src","type":"address"},{"internalType":"address","name":"dst","type":"address"},{"internalType":"uint256","name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"version","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"wards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"}])
@pytest.fixture(scope="session")
def WETH_R():
    """Return a brownie ``Contract`` wrapper for WETH at
    0xc778417E063141139Fce010982780140Aa0cD5Ab (session-scoped: built once
    per test run and shared by all tests).

    The ABI is hard-coded inline and covers the ERC-20 surface
    (name/symbol/decimals, totalSupply, balanceOf, allowance, approve,
    transfer, transferFrom), WETH's deposit/withdraw pair, a payable
    fallback, and the Approval/Transfer/Deposit/Withdrawal events.

    NOTE(review): this address matches the Ropsten WETH9 deployment and the
    ``_R`` suffix presumably means Ropsten — confirm which network/fork the
    suite actually targets.
    """
    # Contract.from_abi only wraps an already-deployed contract at the given
    # address with the supplied ABI; nothing is deployed here.
    return Contract.from_abi("WETH", "0xc778417E063141139Fce010982780140Aa0cD5Ab", [{"constant":True,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[],"name":"deposit","outputs":[],"payable":True,"stateMutability":"payable","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"payable":True,"stateMutability":"payable","type":"fallback"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":True,"name":"guy","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":True,"name":"dst","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"dst","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"src","type":"address"},{"indexed":False,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}])
@pytest.fixture(scope="session")
def USDT():
    """Return a brownie ``Contract`` wrapper for Tether (USDT) at
    0xdAC17F958D2ee523a2206206994597C13D831ec7 (session-scoped).

    The inline ABI reflects USDT's well-known non-standard ERC-20 surface:
    ``approve``/``transfer``/``transferFrom`` declare NO return value here,
    so callers must not expect a bool back. It also exposes the admin
    surface visible in the ABI: owner/transferOwnership, pause/unpause,
    blacklist management (addBlackList, removeBlackList, isBlackListed,
    destroyBlackFunds), fee parameters (setParams, basisPointsRate,
    maximumFee), supply management (issue/redeem) and the deprecation
    upgrade hooks (deprecate, deprecated, upgradedAddress).
    """
    # Wrap-only: no deployment happens; the ABI is pinned inline so the
    # fixture does not depend on an on-disk build artifact.
    return Contract.from_abi("USDT", "0xdAC17F958D2ee523a2206206994597C13D831ec7", [{"constant":True,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_upgradedAddress","type":"address"}],"name":"deprecate","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"deprecated","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_evilUser","type":"address"}],"name":"addBlackList","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"upgradedAddress","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"}],"name":"balances","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"maximumFee","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"_totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"unpause","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"_maker","type":"address"}],"name":"getBlackListStatus","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowed","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"who","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"pause","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"getOwner","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"newBasisPoints","type":"uint256"},{"name":"newMaxFee","type":"uint256"}],"name":"setParams","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"amount","type":"uint256"}],"name":"issue","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"amount","type":"uint256"}],"name":"redeem","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"remaining","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"basisPointsRate","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"address"}],"name":"isBlackListed","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_clearedUser","type":"address"}],"name":"removeBlackList","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"MAX_UINT","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"_blackListedUser","type":"address"}],"name":"destroyBlackFunds","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"inputs":[{"name":"_initialSupply","type":"uint256"},{"name":"_name","type":"string"},{"name":"_symbol","type":"string"},{"name":"_decimals","type":"uint256"}],"payable":False,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":False,"name":"amount","type":"uint256"}],"name":"Issue","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"amount","type":"uint256"}],"name":"Redeem","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"newAddress","type":"address"}],"name":"Deprecate","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"feeBasisPoints","type":"uint256"},{"indexed":False,"name":"maxFee","type":"uint256"}],"name":"Params","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"_blackListedUser","type":"address"},{"indexed":False,"name":"_balance","type":"uint256"}],"name":"DestroyedBlackFunds","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"_user","type":"address"}],"name":"AddedBlackList","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"name":"_user","type":"address"}],"name":"RemovedBlackList","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"owner","type":"address"},{"indexed":True,"name":"spender","type":"address"},{"indexed":False,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"from","type":"address"},{"indexed":True,"name":"to","type":"address"},{"indexed":False,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":False,"inputs":[],"name":"Pause","type":"event"},{"anonymous":False,"inputs":[],"name":"Unpause","type":"event"}])
<artifact_content_63 id="doc-wbtc">
@pytest.fixture(scope="session")
def WBTC():
    """Return a brownie ``Contract`` wrapper for Wrapped BTC at
    0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599 (session-scoped).

    The inline ABI is a standard bool-returning ERC-20 plus, as visible
    below: mint/burn with a mintingFinished/finishMinting latch,
    pause/unpause, increaseApproval/decreaseApproval, reclaimToken, and
    two-step ownership transfer (transferOwnership -> pendingOwner ->
    claimOwnership, with OwnershipRenounced/OwnershipTransferred events).
    """
    # Wrap-only: attaches the inline ABI to the deployed contract; nothing
    # is deployed or mutated by this fixture itself.
    return Contract.from_abi("WBTC", "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", [{"constant":True,"inputs":[],"name":"mintingFinished","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"_token","type":"address"}],"name":"reclaimToken","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"unpause","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"_to","type":"address"},{"name":"_amount","type":"uint256"}],"name":"mint","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"value","type":"uint256"}],"name":"burn","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[],"name":"claimOwnership","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"paused","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_spender","type":"address"},{"name":"_subtractedValue","type":"uint256"}],"name":"decreaseApproval","outputs":[{"name":"success","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"renounceOwnership","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[],"name":"finishMinting","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[],"name":"pause","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"_spender","type":"address"},{"name":"_addedValue","type":"uint256"}],"name":"increaseApproval","outputs":[{"name":"success","type":"bool"}],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"pendingOwner","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"anonymous":False,"inputs":[],"name":"Pause","type":"event"},{"anonymous":False,"inputs":[],"name":"Unpause","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"burner","type":"address"},{"indexed":False,"name":"value","type":"uint256"}],"name":"Burn","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"to","type":"address"},{"indexed":False,"name":"amount","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":False,"inputs":[],"name":"MintFinished","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"previousOwner","type":"address"}],"name":"OwnershipRenounced","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"previousOwner","type":"address"},{"indexed":True,"name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"owner","type":"address"},{"indexed":True,"name":"spender","type":"address"},{"indexed":False,"name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"name":"from","type":"address"},{"indexed":True,"name":"to","type":"address"},{"indexed":False,"name":"value","type":"uint256"}],"name":"Transfer","type":"event"}])
</artifact_content_63>
@pytest.fixture(scope="session")
def crvLpToken():
    """Return a brownie ``Contract`` wrapper for the Curve LP token at
    0x845838DF265Dcd2c412A1Dc9e959c7d08537f8a2 (session-scoped).

    The inline ABI is Vyper-style (per-entry ``gas`` estimates, outputs
    named ``out``): the ERC-20 surface plus ``set_minter`` and
    mint/burn/burnFrom.

    NOTE(review): that address is commonly known as the Curve Compound
    pool LP token (cDAI+cUSDC) — confirm against the pool the tests use.
    """
    # Wrap-only fixture; the ABI is pinned inline rather than fetched from
    # an explorer or a build artifact.
    return Contract.from_abi("crvLpToken", "0x845838DF265Dcd2c412A1Dc9e959c7d08537f8a2", [{"name":"Transfer","inputs":[{"type":"address","name":"_from","indexed":True},{"type":"address","name":"_to","indexed":True},{"type":"uint256","name":"_value","indexed":False}],"anonymous":False,"type":"event"},{"name":"Approval","inputs":[{"type":"address","name":"_owner","indexed":True},{"type":"address","name":"_spender","indexed":True},{"type":"uint256","name":"_value","indexed":False}],"anonymous":False,"type":"event"},{"outputs":[],"inputs":[{"type":"string","name":"_name"},{"type":"string","name":"_symbol"},{"type":"uint256","name":"_decimals"},{"type":"uint256","name":"_supply"}],"constant":False,"payable":False,"type":"constructor"},{"name":"set_minter","outputs":[],"inputs":[{"type":"address","name":"_minter"}],"constant":False,"payable":False,"type":"function","gas":36247},{"name":"totalSupply","outputs":[{"type":"uint256","name":"out"}],"inputs":[],"constant":True,"payable":False,"type":"function","gas":1181},{"name":"allowance","outputs":[{"type":"uint256","name":"out"}],"inputs":[{"type":"address","name":"_owner"},{"type":"address","name":"_spender"}],"constant":True,"payable":False,"type":"function","gas":1519},{"name":"transfer","outputs":[{"type":"bool","name":"out"}],"inputs":[{"type":"address","name":"_to"},{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":74802},{"name":"transferFrom","outputs":[{"type":"bool","name":"out"}],"inputs":[{"type":"address","name":"_from"},{"type":"address","name":"_to"},{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":111953},{"name":"approve","outputs":[{"type":"bool","name":"out"}],"inputs":[{"type":"address","name":"_spender"},{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":39012},{"name":"mint","outputs":[],"inputs":[{"type":"address","name":"_to"},{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":75733},{"name":"burn","outputs":[],"inputs":[{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":76623},{"name":"burnFrom","outputs":[],"inputs":[{"type":"address","name":"_to"},{"type":"uint256","name":"_value"}],"constant":False,"payable":False,"type":"function","gas":76696},{"name":"name","outputs":[{"type":"string","name":"out"}],"inputs":[],"constant":True,"payable":False,"type":"function","gas":7853},{"name":"symbol","outputs":[{"type":"string","name":"out"}],"inputs":[],"constant":True,"payable":False,"type":"function","gas":6906},{"name":"decimals","outputs":[{"type":"uint256","name":"out"}],"inputs":[],"constant":True,"payable":False,"type":"function","gas":1511},{"name":"balanceOf","outputs":[{"type":"uint256","name":"out"}],"inputs":[{"type":"address","name":"arg0"}],"constant":True,"payable":False,"type":"function","gas":1695}])
| 876.934783
| 15,603
| 0.667902
| 7,916
| 80,678
| 6.791056
| 0.042825
| 0.080211
| 0.113378
| 0.080528
| 0.932326
| 0.922635
| 0.916998
| 0.894341
| 0.888891
| 0.860132
| 0
| 0.030755
| 0.003309
| 80,678
| 92
| 15,604
| 876.934783
| 0.637786
| 0.000211
| 0
| 0.323529
| 0
| 0
| 0.621217
| 0.021708
| 0
| 0
| 0.006248
| 0
| 0
| 1
| 0.323529
| true
| 0
| 0.029412
| 0.323529
| 0.676471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
a780380409325b52282ebc7531687ba87e169956
| 25,220
|
py
|
Python
|
scripts/slave/recipe_modules/chromium_tests/chromium_gpu_fyi.py
|
yjbanov/chromium_build
|
22e3872f14dbf367cd787caa638f3ac948eac7d7
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/slave/recipe_modules/chromium_tests/chromium_gpu_fyi.py
|
yjbanov/chromium_build
|
22e3872f14dbf367cd787caa638f3ac948eac7d7
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/slave/recipe_modules/chromium_tests/chromium_gpu_fyi.py
|
yjbanov/chromium_build
|
22e3872f14dbf367cd787caa638f3ac948eac7d7
|
[
"BSD-3-Clause"
] | 1
|
2020-07-23T11:05:06.000Z
|
2020-07-23T11:05:06.000Z
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from . import steps
SPEC = {
'settings': {
'build_gs_bucket': 'chromium-gpu-fyi-archive',
# WARNING: src-side runtest.py is only tested with chromium CQ builders.
# Usage not covered by chromium CQ is not supported and can break
# without notice.
'src_side_runtest_py': True,
},
'builders': {
'GPU Win Builder': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'win',
},
'enable_swarming': True,
'use_isolate': True,
},
'GPU Win Builder (dbg)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 32,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'win',
},
'enable_swarming': True,
'use_isolate': True,
},
'Win7 Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win7 Debug (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder (dbg)',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win8 Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win8 Debug (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder (dbg)',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win7 Release (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win7 Debug (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder (dbg)',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win7 Release (Intel)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# The GPU detection wasn't initially working (crbug.com/580331), and
# multiple copies of the machines have to be deployed into swarming
# in order to keep up with the faster cycle time of the tests.
'enable_swarming': False,
},
'Win7 Release dEQP (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'GPU Win x64 Builder': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'win',
},
'enable_swarming': True,
'use_isolate': True,
},
'GPU Win x64 Builder (dbg)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'win',
},
'enable_swarming': True,
'use_isolate': True,
},
'Win7 x64 Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win x64 Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Win7 x64 Debug (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win x64 Builder (dbg)',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'GPU Linux Builder': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop',
'archive_gpu_tests', 'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'linux',
},
'use_isolate': True,
'enable_swarming': True,
},
'GPU Linux Builder (dbg)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop',
'archive_gpu_tests', 'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'linux',
},
'use_isolate': True,
'enable_swarming': True,
},
'Linux Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder',
'testing': {
'platform': 'linux',
},
'enable_swarming': True,
},
'Linux Release (Intel Graphics Stack)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder',
'testing': {
'platform': 'linux',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# Multiple copies of the machines have to be deployed into swarming
# in order to keep up with the faster cycle time of the tests.
'enable_swarming': False,
},
'Linux Release (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder',
'testing': {
'platform': 'linux',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# Multiple copies of the machines have to be deployed into swarming
# in order to keep up with the faster cycle time of the tests.
'enable_swarming': False,
},
'Linux Debug (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder (dbg)',
'testing': {
'platform': 'linux',
},
'enable_swarming': True,
},
'Linux Release dEQP (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder',
'testing': {
'platform': 'linux',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# TODO(kbr): it isn't clear whether these tests will shard properly
# on Linux, so wait to make that change until a subsequent CL.
'enable_swarming': False,
},
'GPU Mac Builder': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
'use_isolate': True,
},
'GPU Mac Builder (dbg)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop', 'archive_gpu_tests',
'chrome_with_codecs',
'internal_gles2_conform_tests'],
'gclient_config': 'chromium',
'gclient_apply_config': ['chrome_internal', 'angle_top_of_tree'],
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
'use_isolate': True,
},
'Mac 10.10 Release (Intel)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Mac 10.10 Debug (Intel)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder (dbg)',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Mac 10.10 Release (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# Multiple copies of the machines have to be deployed into swarming
# in order to keep up with the faster cycle time of the tests.
'enable_swarming': False,
},
'Mac 10.10 Debug (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder (dbg)',
'testing': {
'platform': 'mac',
},
# Swarming is deliberately NOT enabled on this one-off configuration.
# Multiple copies of the machines have to be deployed into swarming
# in order to keep up with the faster cycle time of the tests.
'enable_swarming': False,
},
'Mac Retina Release': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Mac Retina Debug': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder (dbg)',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Mac 10.10 Retina Release (AMD)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Mac 10.10 Retina Debug (AMD)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Debug',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder (dbg)',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'GPU Fake Linux Builder': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop',
'archive_gpu_tests', 'chrome_with_codecs' ],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'builder',
'compile_targets': [
],
'testing': {
'platform': 'linux',
},
'use_isolate': True,
'enable_swarming': True,
},
'Fake Linux Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Fake Linux Builder',
'testing': {
'platform': 'linux',
},
'enable_swarming': True,
},
# The following machines don't actually exist. They are specified
# here only in order to allow the associated src-side JSON entries
# to be read, and the "optional" GPU tryservers to be specified in
# terms of them.
'Optional Win7 Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Optional Win7 Release (ATI)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 32,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Win Builder',
'testing': {
'platform': 'win',
},
'enable_swarming': True,
},
'Optional Linux Release (NVIDIA)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['mb', 'ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'bot_type': 'tester',
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'parent_buildername': 'GPU Linux Builder',
'testing': {
'platform': 'linux',
},
'enable_swarming': True,
},
'Optional Mac 10.10 Release (Intel)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Optional Mac Retina Release': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
'Optional Mac 10.10 Retina Release (AMD)': {
'chromium_config': 'chromium',
'chromium_apply_config': ['ninja_confirm_noop'],
'gclient_config': 'chromium',
'chromium_config_kwargs': {
'BUILD_CONFIG': 'Release',
'TARGET_BITS': 64,
},
'test_generators': [
steps.generate_gtest,
steps.generate_script,
steps.generate_isolated_script,
],
'bot_type': 'tester',
'parent_buildername': 'GPU Mac Builder',
'testing': {
'platform': 'mac',
},
'enable_swarming': True,
},
},
}
| 30.718636
| 76
| 0.568517
| 2,385
| 25,220
| 5.685115
| 0.074214
| 0.08629
| 0.113578
| 0.08629
| 0.943359
| 0.942695
| 0.942105
| 0.940261
| 0.924847
| 0.923224
| 0
| 0.008515
| 0.292189
| 25,220
| 820
| 77
| 30.756098
| 0.75105
| 0.069469
| 0
| 0.78399
| 0
| 0
| 0.463051
| 0.082132
| 0
| 0
| 0
| 0.00122
| 0
| 1
| 0
| false
| 0
| 0.001271
| 0
| 0.001271
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7c0ab2cbbfe67b4670e158f5b8c4d348931448b
| 48,921
|
py
|
Python
|
PhaseOne/s3dg_vars.py
|
VolpeUSDOT/fra-gctd
|
5452bd954d1a4e9c839b39dd6bcda1f59f320906
|
[
"MIT"
] | null | null | null |
PhaseOne/s3dg_vars.py
|
VolpeUSDOT/fra-gctd
|
5452bd954d1a4e9c839b39dd6bcda1f59f320906
|
[
"MIT"
] | null | null | null |
PhaseOne/s3dg_vars.py
|
VolpeUSDOT/fra-gctd
|
5452bd954d1a4e9c839b39dd6bcda1f59f320906
|
[
"MIT"
] | null | null | null |
# Variable-scope prefix selecting the S3DG classifier head ("Logits") under
# the Inception-V1 backbone. When training only a subset of variables, a
# scope prefix alone is enough to select them (exact per-variable names are
# not needed for scope-based filtering) -- presumably consumed by a
# scope-filtered variable lookup in the trainer; verify against caller.
s3dg_logits = 'InceptionV1/Logits'
# By contrast, when warm-starting training via the Estimator API
# (tf.estimator.WarmStartSettings / vars_to_warm_start), variable names --
# including their full scopes -- must be spelled out exactly; the list that
# follows enumerates them one by one for that purpose.
s3dg_convs = [
'InceptionV1/Conv2d_1a_7x7/BatchNorm/beta',
'InceptionV1/Conv2d_1a_7x7/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Conv2d_1a_7x7/BatchNorm/moving_mean',
'InceptionV1/Conv2d_1a_7x7/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Conv2d_1a_7x7/BatchNorm/moving_variance',
'InceptionV1/Conv2d_1a_7x7/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Conv2d_1a_7x7/weights',
'InceptionV1/Conv2d_1a_7x7/weights/ExponentialMovingAverage',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/beta',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/moving_mean',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/moving_variance',
'InceptionV1/Conv2d_2b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Conv2d_2b_1x1/weights',
'InceptionV1/Conv2d_2b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/beta',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/moving_mean',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/moving_variance',
'InceptionV1/Conv2d_2c_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/self_gating/transformer_W/weights',
'InceptionV1/Conv2d_2c_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/temporal/biases',
'InceptionV1/Conv2d_2c_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/temporal/weights',
'InceptionV1/Conv2d_2c_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Conv2d_2c_3x3/weights',
'InceptionV1/Conv2d_2c_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3b/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_3b/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_3b/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_3b/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3c/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_3c/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_3c/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_3c/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4b/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4b/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4b/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_4b/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4c/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4c/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4c/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_4c/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4d/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4d/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4d/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_4d/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4e/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4e/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4e/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_4e/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4f/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4f/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_4f/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_4f/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5b/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_5b/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_5b/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_5b/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5c/Branch_0/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_5c/Branch_1/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/weights',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0a_1x1/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/temporal/biases',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/temporal/biases/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/temporal/weights',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/temporal/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/weights',
'InceptionV1/Mixed_5c/Branch_2/Conv2d_0b_3x3/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/beta/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/self_gating/transformer_W/weights/ExponentialMovingAverage',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/weights',
'InceptionV1/Mixed_5c/Branch_3/Conv2d_0b_1x1/weights/ExponentialMovingAverage'
]
# Bundle the two S3D-G checkpoint-variable name lists into one lookup dict so
# callers can pick the conv-layer vs. logits-layer variables by key.
# NOTE(review): s3dg_convs is defined elsewhere in this file — not visible here.
s3dg_vars = {
's3dg_convs': s3dg_convs,
's3dg_logits': s3dg_logits
}
| 79.160194
| 107
| 0.858016
| 6,626
| 48,921
| 5.920314
| 0.010112
| 0.234934
| 0.293668
| 0.082594
| 0.99378
| 0.991205
| 0.976522
| 0.959009
| 0.958117
| 0.897293
| 0
| 0.089054
| 0.038245
| 48,921
| 618
| 108
| 79.160194
| 0.744697
| 0.004006
| 0
| 0
| 0
| 0
| 0.923525
| 0.922725
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ac41d1a34c7b6d2ac86393118d268184dfd5de60
| 6,084
|
py
|
Python
|
tests/test_layer.py
|
TenebraeX8/picknmix
|
dceedff8ca7982d1ad6b69bb53ed117d84f71f84
|
[
"MIT"
] | 1
|
2020-07-27T19:17:24.000Z
|
2020-07-27T19:17:24.000Z
|
tests/test_layer.py
|
TenebraeX8/picknmix
|
dceedff8ca7982d1ad6b69bb53ed117d84f71f84
|
[
"MIT"
] | null | null | null |
tests/test_layer.py
|
TenebraeX8/picknmix
|
dceedff8ca7982d1ad6b69bb53ed117d84f71f84
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import MinMaxScaler
from sklearn.linear_model import LogisticRegression
from picknmix import Layer
class TestLayer:
    """Unit tests for picknmix.Layer.

    Covers construction validation, fitting and predicting with a single
    model or multiple parallel models, optional per-model preprocessors,
    and probability (``proba``) outputs for 2-class and multi-class cases.
    """

    def test_different_numbers_of_preprocessor_and_models(self):
        """Mismatched model/preprocessor list lengths must raise."""
        with pytest.raises(Exception):
            assert Layer([LinearRegression(), LinearRegression()],
                         [MinMaxScaler()])

    def test_fit_single_model_without_preprocess(self):
        """fit() returns one column of near-perfect in-sample predictions."""
        layer_model = Layer([LinearRegression()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        # X and y are linearly related, predictions will be almost perfect
        result = layer_model.fit(X, y)
        assert result.shape == (4,1)
        assert np.allclose(result.flatten(), y)

    # Fixed typo: was "test_fir_..."; name now matches the sibling
    # test_fit_single_model_without_preprocess.
    def test_fit_single_model_with_preprocess(self):
        """A MinMaxScaler preprocessor must not hurt a linear fit."""
        layer_model = Layer([LinearRegression()],
                            [MinMaxScaler()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        # X and y are linearly related, predictions will be almost perfect
        result = layer_model.fit(X, y)
        assert result.shape == (4,1)
        assert np.allclose(result.flatten(), y)

    def test_fit_single_model_with_2_class_proba(self):
        """proba=True yields one probability column per class (2 here)."""
        layer_model = Layer([LogisticRegression(solver='liblinear')],
                            proba=True)
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 0])
        result = layer_model.fit(X, y)
        assert result.shape == (4,2)

    def test_fit_single_model_with_multi_class_proba(self):
        """proba=True with 3 classes yields 3 probability columns."""
        layer_model = Layer([LogisticRegression(solver='lbfgs',
                                                multi_class='multinomial')],
                            proba=True)
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 2])
        result = layer_model.fit(X, y)
        assert result.shape == (4,3)

    def test_fit_multiple_models(self):
        """Two models produce two output columns; None skips preprocessing."""
        layer_model = Layer([LinearRegression(), LinearRegression()],
                            [None, MinMaxScaler()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        # X and y are linearly related, predictions will be almost perfect
        result = layer_model.fit(X, y)
        assert result.shape == (4,2)
        assert np.allclose(result[:,0], y)
        assert np.allclose(result[:,1], y)

    def test_fit_multiple_model_with_2_class_proba(self):
        """Mixed proba flags: 2 proba columns + 1 label column = 3 total."""
        layer_model = Layer([LogisticRegression(solver='liblinear'),
                             LogisticRegression(solver='liblinear')],
                            proba=[True,False])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 0])
        result = layer_model.fit(X, y)
        assert result.shape == (4,3)

    def test_predict_single_model_without_preprocess(self):
        """predict() on new rows reproduces the linear relation exactly."""
        layer_model = Layer([LinearRegression()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5],[3, 5]]))
        assert result.shape == (2,1)
        # 3*1 + 5*2 + 3 == 16 for each row
        assert np.allclose(result, np.array([[16],[16]]))

    def test_predict_single_model_with_preprocess(self):
        """predict() applies the fitted preprocessor before the model."""
        layer_model = Layer([LinearRegression()],
                            [MinMaxScaler()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5]]))
        assert result.shape == (1,1)
        assert np.allclose(result, np.array([[16]]))

    def test_predict_single_model_with_2_class_proba(self):
        """predict() with proba=True returns one column per class."""
        layer_model = Layer([LogisticRegression(solver='liblinear')],
                            proba=True)
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 0])
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5]]))
        assert result.shape == (1,2)

    def test_predict_single_model_with_multi_class_proba(self):
        """predict() with 3 classes and proba=True returns 3 columns."""
        layer_model = Layer([LogisticRegression(solver='lbfgs',
                                                multi_class='multinomial')],
                            proba=True)
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 2])
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5]]))
        assert result.shape == (1,3)

    def test_predict_multiple_model(self):
        """Both parallel models predict the same value on new data."""
        layer_model = Layer([LinearRegression(), LinearRegression()],
                            [None, MinMaxScaler()])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.dot(X, np.array([1, 2])) + 3
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5]]))
        assert result.shape == (1,2)
        assert np.allclose(result, np.array([[16, 16]]))

    def test_predict_multiple_model_with_2_class_proba(self):
        """Mixed proba flags at predict time: 2 proba + 1 label column."""
        layer_model = Layer([LogisticRegression(solver='liblinear'),
                             LogisticRegression(solver='liblinear')],
                            proba=[True,False])
        X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
        y = np.array([1, 1, 0, 0])
        layer_model.fit(X, y)
        result = layer_model.predict(np.array([[3, 5], [2, 5]]))
        assert result.shape == (2,3)

    def test_using_proba_without_predict_proba_method(self):
        """proba=True on a model lacking predict_proba warns and falls back
        to plain predictions."""
        with pytest.warns(Warning) as record:
            layer_model = Layer([LinearRegression()],
                                proba=True)
            X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
            y = np.dot(X, np.array([1, 2])) + 3
            layer_model.fit(X, y)
            result = layer_model.predict(np.array([[3, 5],[3, 5]]))
            assert result.shape == (2,1)
            assert np.allclose(result, np.array([[16],[16]]))
        # at least one warning must have been captured
        assert record
| 43.148936
| 76
| 0.54142
| 802
| 6,084
| 3.961347
| 0.092269
| 0.024551
| 0.065471
| 0.056657
| 0.858986
| 0.814605
| 0.806106
| 0.8017
| 0.791627
| 0.791627
| 0
| 0.050363
| 0.298323
| 6,084
| 140
| 77
| 43.457143
| 0.693839
| 0.031887
| 0
| 0.727273
| 0
| 0
| 0.014611
| 0
| 0
| 0
| 0
| 0
| 0.190083
| 1
| 0.115702
| false
| 0
| 0.049587
| 0
| 0.173554
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac66df35a8ba478eaeed07f0ca746991df425867
| 16,244
|
py
|
Python
|
eppy/geometry/mcve.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | null | null | null |
eppy/geometry/mcve.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | null | null | null |
eppy/geometry/mcve.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | null | null | null |
from eppy.iddcurrent import iddcurrent
from eppy.modeleditor import IDF
from six import StringIO
# Text of the current bundled Energy+ IDD (data dictionary).
iddsnippet = iddcurrent.iddtxt
# Wrap the IDD text in an in-memory file object and register it with IDF.
# NOTE(review): setiddname presumably must run before any IDF object is
# constructed — confirm against eppy's documentation.
iddfhandle = StringIO(iddcurrent.iddtxt)
IDF.setiddname(iddfhandle)
idftxt = """
Version,
8.5; !- Version Identifier
Building,
Building 1, !- Name
, !- North Axis
, !- Terrain
, !- Loads Convergence Tolerance Value
, !- Temperature Convergence Tolerance Value
, !- Solar Distribution
, !- Maximum Number of Warmup Days
; !- Minimum Number of Warmup Days
Zone,
Thermal Zone 1, !- Name
-0.0, !- Direction of Relative North
3.41258124196863, !- X Origin
0.821279819391803, !- Y Origin
0.7279, !- Z Origin
, !- Type
, !- Multiplier
, !- Ceiling Height
, !- Volume
, !- Floor Area
, !- Zone Inside Convection Algorithm
; !- Zone Outside Convection Algorithm
Zone,
Thermal Zone 2, !- Name
-0.0, !- Direction of Relative North
3.41258124196863, !- X Origin
0.821279819391803, !- Y Origin
0.0, !- Z Origin
, !- Type
, !- Multiplier
, !- Ceiling Height
, !- Volume
, !- Floor Area
, !- Zone Inside Convection Algorithm
; !- Zone Outside Convection Algorithm
BuildingSurface:Detailed,
z1 Floor 0001, !- Name
Floor, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Ground, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
NoSun, !- Sun Exposure
NoWind, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-0.259, !- Vertex 1 Xcoordinate
2.46, !- Vertex 1 Ycoordinate
0.0, !- Vertex 1 Zcoordinate
-0.259, !- Vertex 2 Xcoordinate
0.4, !- Vertex 2 Ycoordinate
0.0, !- Vertex 2 Zcoordinate
-1.68, !- Vertex 3 Xcoordinate
0.4, !- Vertex 3 Ycoordinate
0.0, !- Vertex 3 Zcoordinate
-1.68, !- Vertex 4 Xcoordinate
2.46, !- Vertex 4 Ycoordinate
0.0; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z1 Wall 0001, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-0.259, !- Vertex 1 Xcoordinate
2.46, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
-0.259, !- Vertex 2 Xcoordinate
2.46, !- Vertex 2 Ycoordinate
0.0, !- Vertex 2 Zcoordinate
-1.68, !- Vertex 3 Xcoordinate
2.46, !- Vertex 3 Ycoordinate
0.0, !- Vertex 3 Zcoordinate
-1.68, !- Vertex 4 Xcoordinate
2.46, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z1 Wall 0002, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-0.259, !- Vertex 1 Xcoordinate
0.4, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
-0.259, !- Vertex 2 Xcoordinate
0.4, !- Vertex 2 Ycoordinate
0.0, !- Vertex 2 Zcoordinate
-0.259, !- Vertex 3 Xcoordinate
2.46, !- Vertex 3 Ycoordinate
0.0, !- Vertex 3 Zcoordinate
-0.259, !- Vertex 4 Xcoordinate
2.46, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z1 Wall 0003, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-1.68, !- Vertex 1 Xcoordinate
0.4, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
-1.68, !- Vertex 2 Xcoordinate
0.4, !- Vertex 2 Ycoordinate
0.0, !- Vertex 2 Zcoordinate
-0.259, !- Vertex 3 Xcoordinate
0.4, !- Vertex 3 Ycoordinate
0.0, !- Vertex 3 Zcoordinate
-0.259, !- Vertex 4 Xcoordinate
0.4, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z1 Wall 0004, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-1.68, !- Vertex 1 Xcoordinate
2.46, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
-1.68, !- Vertex 2 Xcoordinate
2.46, !- Vertex 2 Ycoordinate
0.0, !- Vertex 2 Zcoordinate
-1.68, !- Vertex 3 Xcoordinate
0.4, !- Vertex 3 Ycoordinate
0.0, !- Vertex 3 Zcoordinate
-1.68, !- Vertex 4 Xcoordinate
0.4, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z1 Roof 0001, !- Name
Roof, !- Surface Type
, !- Construction Name
Thermal Zone 1, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-0.259, !- Vertex 1 Xcoordinate
0.4, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
-0.259, !- Vertex 2 Xcoordinate
2.46, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
-1.68, !- Vertex 3 Xcoordinate
2.46, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
-1.68, !- Vertex 4 Xcoordinate
0.4, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Floor 0001, !- Name
Floor, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Ground, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
NoSun, !- Sun Exposure
NoWind, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
0.0, !- Vertex 1 Xcoordinate
2.9, !- Vertex 1 Ycoordinate
0.7279, !- Vertex 1 Zcoordinate
0.0, !- Vertex 2 Xcoordinate
0.0, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
-2.14, !- Vertex 3 Xcoordinate
0.0, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
-2.14, !- Vertex 4 Xcoordinate
2.9, !- Vertex 4 Ycoordinate
0.7279; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Wall 0001, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-2.14, !- Vertex 1 Xcoordinate
0.0, !- Vertex 1 Ycoordinate
1.458, !- Vertex 1 Zcoordinate
-2.14, !- Vertex 2 Xcoordinate
0.0, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
0.0, !- Vertex 3 Xcoordinate
0.0, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
0.0, !- Vertex 4 Xcoordinate
0.0, !- Vertex 4 Ycoordinate
1.458; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Wall 0002, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
-2.14, !- Vertex 1 Xcoordinate
2.9, !- Vertex 1 Ycoordinate
1.458, !- Vertex 1 Zcoordinate
-2.14, !- Vertex 2 Xcoordinate
2.9, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
-2.14, !- Vertex 3 Xcoordinate
0.0, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
-2.14, !- Vertex 4 Xcoordinate
0.0, !- Vertex 4 Ycoordinate
1.458; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Wall 0003, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
0.0, !- Vertex 1 Xcoordinate
2.9, !- Vertex 1 Ycoordinate
1.458, !- Vertex 1 Zcoordinate
0.0, !- Vertex 2 Xcoordinate
2.9, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
-2.14, !- Vertex 3 Xcoordinate
2.9, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
-2.14, !- Vertex 4 Xcoordinate
2.9, !- Vertex 4 Ycoordinate
1.458; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Wall 0004, !- Name
Wall, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
0.0, !- Vertex 1 Xcoordinate
0.0, !- Vertex 1 Ycoordinate
1.458, !- Vertex 1 Zcoordinate
0.0, !- Vertex 2 Xcoordinate
0.0, !- Vertex 2 Ycoordinate
0.7279, !- Vertex 2 Zcoordinate
0.0, !- Vertex 3 Xcoordinate
2.9, !- Vertex 3 Ycoordinate
0.7279, !- Vertex 3 Zcoordinate
0.0, !- Vertex 4 Xcoordinate
2.9, !- Vertex 4 Ycoordinate
1.458; !- Vertex 4 Zcoordinate
BuildingSurface:Detailed,
z2 Roof 0001, !- Name
Roof, !- Surface Type
, !- Construction Name
Thermal Zone 2, !- Zone Name
Outdoors, !- Outside Boundary Condition
, !- Outside Boundary Condition Object
SunExposed, !- Sun Exposure
WindExposed, !- Wind Exposure
, !- View Factor to Ground
, !- Number of Vertices
0.0, !- Vertex 1 Xcoordinate
0.0, !- Vertex 1 Ycoordinate
1.458, !- Vertex 1 Zcoordinate
0.0, !- Vertex 2 Xcoordinate
2.9, !- Vertex 2 Ycoordinate
1.458, !- Vertex 2 Zcoordinate
-2.14, !- Vertex 3 Xcoordinate
2.9, !- Vertex 3 Ycoordinate
1.458, !- Vertex 3 Zcoordinate
-2.14, !- Vertex 4 Xcoordinate
0.0, !- Vertex 4 Ycoordinate
1.458; !- Vertex 4 Zcoordinate
"""
idf = IDF()
idf.initreadtxt(idftxt)
idf.outputtype = "compressed"
idf.printidf()
| 46.81268
| 72
| 0.40538
| 1,307
| 16,244
| 5.038256
| 0.079572
| 0.011845
| 0.043736
| 0.080182
| 0.92407
| 0.92407
| 0.92407
| 0.91754
| 0.916325
| 0.915262
| 0
| 0.095522
| 0.524378
| 16,244
| 346
| 73
| 46.947977
| 0.756795
| 0
| 0
| 0.867692
| 0
| 0
| 0.982332
| 0.018468
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009231
| 0
| 0.009231
| 0.003077
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3bf9268ea8871f5c4b9b494f97eac19783e3d916
| 87
|
py
|
Python
|
gif2html5/date_manager.py
|
sshyran/gif2html5-app
|
e8461b8b0e8e1d963bb1b642acfc97c940b4c015
|
[
"MIT"
] | null | null | null |
gif2html5/date_manager.py
|
sshyran/gif2html5-app
|
e8461b8b0e8e1d963bb1b642acfc97c940b4c015
|
[
"MIT"
] | null | null | null |
gif2html5/date_manager.py
|
sshyran/gif2html5-app
|
e8461b8b0e8e1d963bb1b642acfc97c940b4c015
|
[
"MIT"
] | null | null | null |
import datetime
def get_current_date():
    """Return today's date as an ISO-8601 string (YYYY-MM-DD)."""
    today = datetime.date.today()
    return today.isoformat()
| 14.5
| 44
| 0.747126
| 11
| 87
| 5.727273
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 5
| 45
| 17.4
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
3bfc353623120b2ec7e5dbf733e69b1161fddcd6
| 3,501
|
py
|
Python
|
migrations/versions/97b4fcf044a0_.py
|
kokes/ockovani-covid
|
09810ccf187a22b7a5bb5aeae6156b62175fbda7
|
[
"Apache-2.0"
] | 62
|
2021-01-16T18:02:57.000Z
|
2022-01-12T21:10:34.000Z
|
migrations/versions/97b4fcf044a0_.py
|
kokes/ockovani-covid
|
09810ccf187a22b7a5bb5aeae6156b62175fbda7
|
[
"Apache-2.0"
] | 224
|
2021-01-16T19:11:01.000Z
|
2022-03-15T19:42:37.000Z
|
migrations/versions/97b4fcf044a0_.py
|
kokes/ockovani-covid
|
09810ccf187a22b7a5bb5aeae6156b62175fbda7
|
[
"Apache-2.0"
] | 13
|
2021-01-16T18:38:11.000Z
|
2021-07-02T20:09:47.000Z
|
"""empty message
Revision ID: 97b4fcf044a0
Revises: c703c687dcbf
Create Date: 2021-04-12 23:26:07.579397
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# Revision identifiers, used by Alembic to order the migration chain.
revision = '97b4fcf044a0'        # this migration
down_revision = 'c703c687dcbf'   # the migration this one applies on top of
branch_labels = None
depends_on = None
def upgrade():
    """Add registrace_rezervace_prumer* metric columns to the four *_metriky
    tables and tighten dodavky_vakcin (NOT NULL + FK to vakciny)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('cr_metriky', sa.Column('registrace_rezervace_prumer', sa.Float(), nullable=True))
    op.add_column('cr_metriky', sa.Column('registrace_rezervace_prumer_zmena_den', sa.Float(), nullable=True))
    op.add_column('cr_metriky', sa.Column('registrace_rezervace_prumer_zmena_tyden', sa.Float(), nullable=True))
    # dodavky_vakcin: make datum/vyrobce mandatory and reference vakciny.vyrobce.
    op.alter_column('dodavky_vakcin', 'datum',
               existing_type=postgresql.TIMESTAMP(),
               nullable=False)
    op.alter_column('dodavky_vakcin', 'vyrobce',
               existing_type=sa.TEXT(),
               nullable=False)
    op.create_foreign_key(None, 'dodavky_vakcin', 'vakciny', ['vyrobce'], ['vyrobce'])
    op.add_column('kraje_metriky', sa.Column('registrace_rezervace_prumer', sa.Float(), nullable=True))
    op.add_column('kraje_metriky', sa.Column('registrace_rezervace_prumer_zmena_den', sa.Float(), nullable=True))
    op.add_column('kraje_metriky', sa.Column('registrace_rezervace_prumer_zmena_tyden', sa.Float(), nullable=True))
    op.add_column('ockovaci_mista_metriky', sa.Column('registrace_rezervace_prumer', sa.Float(), nullable=True))
    op.add_column('ockovaci_mista_metriky', sa.Column('registrace_rezervace_prumer_zmena_den', sa.Float(), nullable=True))
    op.add_column('ockovaci_mista_metriky', sa.Column('registrace_rezervace_prumer_zmena_tyden', sa.Float(), nullable=True))
    op.add_column('okresy_metriky', sa.Column('registrace_rezervace_prumer', sa.Float(), nullable=True))
    op.add_column('okresy_metriky', sa.Column('registrace_rezervace_prumer_zmena_den', sa.Float(), nullable=True))
    op.add_column('okresy_metriky', sa.Column('registrace_rezervace_prumer_zmena_tyden', sa.Float(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the added metric columns and relax
    dodavky_vakcin back to nullable columns without the FK."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('okresy_metriky', 'registrace_rezervace_prumer_zmena_tyden')
    op.drop_column('okresy_metriky', 'registrace_rezervace_prumer_zmena_den')
    op.drop_column('okresy_metriky', 'registrace_rezervace_prumer')
    op.drop_column('ockovaci_mista_metriky', 'registrace_rezervace_prumer_zmena_tyden')
    op.drop_column('ockovaci_mista_metriky', 'registrace_rezervace_prumer_zmena_den')
    op.drop_column('ockovaci_mista_metriky', 'registrace_rezervace_prumer')
    op.drop_column('kraje_metriky', 'registrace_rezervace_prumer_zmena_tyden')
    op.drop_column('kraje_metriky', 'registrace_rezervace_prumer_zmena_den')
    op.drop_column('kraje_metriky', 'registrace_rezervace_prumer')
    # NOTE(review): constraint name is None because it was auto-named on create.
    op.drop_constraint(None, 'dodavky_vakcin', type_='foreignkey')
    op.alter_column('dodavky_vakcin', 'vyrobce',
               existing_type=sa.TEXT(),
               nullable=True)
    op.alter_column('dodavky_vakcin', 'datum',
               existing_type=postgresql.TIMESTAMP(),
               nullable=True)
    op.drop_column('cr_metriky', 'registrace_rezervace_prumer_zmena_tyden')
    op.drop_column('cr_metriky', 'registrace_rezervace_prumer_zmena_den')
    op.drop_column('cr_metriky', 'registrace_rezervace_prumer')
    # ### end Alembic commands ###
| 53.861538
| 124
| 0.745216
| 437
| 3,501
| 5.615561
| 0.173913
| 0.185819
| 0.244499
| 0.195599
| 0.823146
| 0.823146
| 0.820701
| 0.820701
| 0.742054
| 0.560717
| 0
| 0.015023
| 0.125393
| 3,501
| 64
| 125
| 54.703125
| 0.786414
| 0.084262
| 0
| 0.255319
| 0
| 0
| 0.423162
| 0.301672
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.06383
| 0
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
021ae8ee1062a5b89d9ca1a55140ec3e011849b5
| 172
|
py
|
Python
|
app/routes.py
|
Flyingblu/AskA_crawler
|
4ce63683dc2663e444f3788b09c298709e69399e
|
[
"MIT"
] | null | null | null |
app/routes.py
|
Flyingblu/AskA_crawler
|
4ce63683dc2663e444f3788b09c298709e69399e
|
[
"MIT"
] | null | null | null |
app/routes.py
|
Flyingblu/AskA_crawler
|
4ce63683dc2663e444f3788b09c298709e69399e
|
[
"MIT"
] | null | null | null |
from app import flask_app
from app import acrawler
from flask import request
@flask_app.route('/v2/linc/')
def index():
    """Return crawler results for the page passed in query parameter 'p'."""
    page = request.args.get('p')
    return acrawler.get_qs(page)
| 19.111111
| 49
| 0.744186
| 28
| 172
| 4.464286
| 0.571429
| 0.112
| 0.208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006711
| 0.133721
| 172
| 8
| 50
| 21.5
| 0.832215
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
02205cea8f4013128909faaec9e0e80356150192
| 13,916
|
py
|
Python
|
tests/test_actor_critic.py
|
mingzhang96/DI-hpc
|
5431c283a91b77df7c6a86fb0affa60099d4bb31
|
[
"Apache-2.0"
] | 64
|
2021-07-08T02:18:08.000Z
|
2022-02-28T09:52:57.000Z
|
tests/test_actor_critic.py
|
mingzhang96/DI-hpc
|
5431c283a91b77df7c6a86fb0affa60099d4bb31
|
[
"Apache-2.0"
] | null | null | null |
tests/test_actor_critic.py
|
mingzhang96/DI-hpc
|
5431c283a91b77df7c6a86fb0affa60099d4bb31
|
[
"Apache-2.0"
] | 3
|
2021-07-14T08:58:45.000Z
|
2022-03-30T12:36:46.000Z
|
import time
import torch
import numpy as np
from testbase import mean_relative_error, times
import hpc_models
assert torch.cuda.is_available()  # these benchmarks require a CUDA device
use_cuda = True
# NOTE(review): shadows `times` imported from testbase above — confirm intended.
times = 100  # iterations per timing loop
# Problem sizes for the update-ae benchmark.
batch_size = 8
max_entity_num = 182
input_dim = 1024
# Problem sizes for the LSTM-activation benchmark.
lstm_seq_len = 1
lstm_batch_size = 8
lstm_input_size = 32
lstm_hidden_size = 32
lstm_num_layers = 1
def torch_update_ae(autoregressive_embedding, key_embeddings, sample_entity, max_entity_num, end_flag):
    """Pure-torch reference for the autoregressive-embedding update.

    Adds each row's selected key embedding to the embedding, except for rows
    whose end_flag is set (those rows pass through unchanged).  Returns a new
    tensor; inputs are not mutated.  `max_entity_num` is accepted for interface
    parity with the hpc kernel and is unused here.
    """
    batch = autoregressive_embedding.shape[0]
    selected = key_embeddings[torch.arange(batch), sample_entity]
    keep = ~end_flag.unsqueeze(dim=1)  # zero out the update for finished rows
    return autoregressive_embedding + selected * keep
def actor_critic_update_ae_val():
    """Validate hpc_models.actor_critic_update_ae against the torch reference."""
    # Random problem instance: embeddings plus one sampled entity per batch row.
    ori_ae = torch.randn(batch_size, input_dim)
    ori_ke = torch.randn(batch_size, max_entity_num, input_dim)
    ori_entity_num = torch.randint(max_entity_num - 2, max_entity_num, size=(batch_size, ))
    ori_sample_entity = []
    for i in range(batch_size):
        entity_num = ori_entity_num[i]
        ori_sample_entity.append(torch.randint(0, entity_num, size=(1, )))
    ori_sample_entity = torch.stack(ori_sample_entity, dim=0).squeeze(1)
    ori_end_flag = torch.zeros(batch_size).bool()
    # Independent copies for the hpc kernel (it updates hpc_ae in place).
    hpc_ae = ori_ae.clone().detach()
    hpc_ke = ori_ke.clone().detach()
    hpc_entity_num = ori_entity_num.clone().detach()
    hpc_sample_entity = ori_sample_entity.clone().detach()
    hpc_end_flag = ori_end_flag.clone().detach()
    if use_cuda:
        ori_ae = ori_ae.cuda()
        ori_ke = ori_ke.cuda()
        ori_entity_num = ori_entity_num.cuda()
        ori_sample_entity = ori_sample_entity.cuda()
        ori_end_flag = ori_end_flag.cuda()
        hpc_ae = hpc_ae.cuda()
        hpc_ke = hpc_ke.cuda()
        hpc_entity_num = hpc_entity_num.cuda()
        hpc_sample_entity = hpc_sample_entity.cuda()
        hpc_end_flag = hpc_end_flag.cuda()
    # Reference path.  NOTE(review): ori_entity_num is passed as max_entity_num;
    # the reference ignores that argument, so this appears intentional — confirm.
    ori_end_flag[ori_sample_entity == ori_entity_num] = 1
    ori_out = torch_update_ae(ori_ae, ori_ke, ori_sample_entity, ori_entity_num, ori_end_flag)
    # hpc path; the kernel writes its result into hpc_ae.
    hpc_end_flag[hpc_sample_entity == hpc_entity_num] = 1
    hpc_models.actor_critic_update_ae([hpc_ke, hpc_sample_entity, hpc_entity_num], [hpc_ae])
    hpc_out = hpc_ae
    if use_cuda:
        torch.cuda.synchronize()
    mre = mean_relative_error(torch.flatten(ori_out).cpu().detach().numpy(), torch.flatten(hpc_out).cpu().detach().numpy())
    print("actor critic update ae mean_relative_error: " + str(mre))
    #print("ori_out: " + str(ori_out))
    #print("hpc_out: " + str(hpc_out))
def actor_critic_update_ae_perf():
    """Time the torch reference vs hpc_models.actor_critic_update_ae."""
    ori_ae = torch.randn(batch_size, input_dim)
    ori_ke = torch.randn(batch_size, max_entity_num, input_dim)
    ori_entity_num = torch.randint(max_entity_num - 2, max_entity_num, size=(batch_size, ))
    ori_sample_entity = []
    for i in range(batch_size):
        entity_num = ori_entity_num[i]
        ori_sample_entity.append(torch.randint(0, entity_num, size=(1, )))
    ori_sample_entity = torch.stack(ori_sample_entity, dim=0).squeeze(1)
    ori_end_flag = torch.zeros(batch_size).bool()
    # Independent copies for the hpc kernel (it updates hpc_ae in place).
    hpc_ae = ori_ae.clone().detach()
    hpc_ke = ori_ke.clone().detach()
    hpc_entity_num = ori_entity_num.clone().detach()
    hpc_sample_entity = ori_sample_entity.clone().detach()
    hpc_end_flag = ori_end_flag.clone().detach()
    if use_cuda:
        ori_ae = ori_ae.cuda()
        ori_ke = ori_ke.cuda()
        ori_entity_num = ori_entity_num.cuda()
        ori_sample_entity = ori_sample_entity.cuda()
        ori_end_flag = ori_end_flag.cuda()
        hpc_ae = hpc_ae.cuda()
        hpc_ke = hpc_ke.cuda()
        hpc_entity_num = hpc_entity_num.cuda()
        hpc_sample_entity = hpc_sample_entity.cuda()
        hpc_end_flag = hpc_end_flag.cuda()
    # Timing loop: torch reference.  synchronize() before reading the clock so
    # queued CUDA work is included in the measurement.
    t = time.time()
    for i in range(times):
        ori_end_flag[ori_sample_entity == ori_entity_num] = 1
        ori_out = torch_update_ae(ori_ae, ori_ke, ori_sample_entity, ori_entity_num, ori_end_flag)
    if use_cuda:
        torch.cuda.synchronize()
    print('original update ae cost time: {}'.format(time.time() - t))
    # Timing loop: hpc kernel (result accumulates in hpc_ae).
    t = time.time()
    for i in range(times):
        hpc_end_flag[hpc_sample_entity == hpc_entity_num] = 1
        hpc_models.actor_critic_update_ae([hpc_ke, hpc_sample_entity, hpc_entity_num], [hpc_ae])
    hpc_out = hpc_ae
    if use_cuda:
        torch.cuda.synchronize()
    print('hpc update ae cost time: {}'.format(time.time() - t))
def actor_critic_lstm_activation_val():
    """Validate hpc_models.actor_critic_lstm_activation against torch.nn.LSTM."""
    ori_x = torch.randn(lstm_seq_len, lstm_batch_size, lstm_input_size)
    ori_h0 = torch.randn(lstm_num_layers, lstm_batch_size, lstm_hidden_size)
    ori_c0 = torch.randn(lstm_num_layers, lstm_batch_size, lstm_hidden_size)
    ori_lstm = torch.nn.LSTM(lstm_input_size, lstm_hidden_size, lstm_num_layers)
    hpc_x = ori_x.clone().detach()
    hpc_h0 = ori_h0.clone().detach()
    hpc_c0 = ori_c0.clone().detach()
    # Pre-activation buffers; 4 * hidden_size covers the i/f/g/o gates.
    hpc_ih = torch.zeros(lstm_batch_size, lstm_hidden_size * 4)
    hpc_hh = torch.zeros(lstm_batch_size, lstm_hidden_size * 4)
    if use_cuda:
        ori_x = ori_x.cuda()
        ori_h0 = ori_h0.cuda()
        ori_c0 = ori_c0.cuda()
        ori_lstm = ori_lstm.cuda()
        hpc_x = hpc_x.cuda()
        hpc_h0 = hpc_h0.cuda()
        hpc_c0 = hpc_c0.cuda()
        hpc_ih = hpc_ih.cuda()
        hpc_hh = hpc_hh.cuda()
    ori_out, ori_state = ori_lstm.forward(ori_x, (ori_h0, ori_c0))
    # Reuse the torch LSTM's layer-0 parameters so both paths compute the
    # same cell; the hpc kernel only does the gate activations.
    hpc_wih0 = ori_lstm.weight_ih_l0
    hpc_whh0 = ori_lstm.weight_hh_l0
    hpc_bih0 = ori_lstm.bias_ih_l0
    hpc_bhh0 = ori_lstm.bias_hh_l0
    hpc_bias = hpc_bih0 + hpc_bhh0
    # Input and hidden projections computed in torch, written into the buffers.
    torch.matmul(hpc_x[0].detach(), hpc_wih0.transpose(0, 1).detach(), out = hpc_ih)
    torch.matmul(hpc_h0[0].detach(), hpc_whh0.transpose(0, 1).detach(), out = hpc_hh)
    # Kernel overwrites hpc_h0 / hpc_c0 with the new hidden / cell state.
    hpc_models.actor_critic_lstm_activation([hpc_ih, hpc_hh, hpc_bias], [hpc_h0, hpc_c0])
    hpc_out = hpc_h0
    if use_cuda:
        torch.cuda.synchronize()
    mre = mean_relative_error(torch.flatten(ori_out).cpu().detach().numpy(), torch.flatten(hpc_out).cpu().detach().numpy())
    print("actor critic lstm activation mean_relative_error: " + str(mre))
def actor_critic_lstm_activation_perf():
    """Time torch.nn.LSTM vs the hpc lstm-activation kernel."""
    ori_x = torch.randn(lstm_seq_len, lstm_batch_size, lstm_input_size)
    ori_h0 = torch.randn(lstm_num_layers, lstm_batch_size, lstm_hidden_size)
    ori_c0 = torch.randn(lstm_num_layers, lstm_batch_size, lstm_hidden_size)
    ori_lstm = torch.nn.LSTM(lstm_input_size, lstm_hidden_size, lstm_num_layers)
    hpc_x = ori_x.clone().detach()
    hpc_h0 = ori_h0.clone().detach()
    hpc_c0 = ori_c0.clone().detach()
    # Pre-activation buffers; 4 * hidden_size covers the i/f/g/o gates.
    hpc_ih = torch.zeros(lstm_batch_size, lstm_hidden_size * 4)
    hpc_hh = torch.zeros(lstm_batch_size, lstm_hidden_size * 4)
    if use_cuda:
        ori_x = ori_x.cuda()
        ori_h0 = ori_h0.cuda()
        ori_c0 = ori_c0.cuda()
        ori_lstm = ori_lstm.cuda()
        hpc_x = hpc_x.cuda()
        hpc_h0 = hpc_h0.cuda()
        hpc_c0 = hpc_c0.cuda()
        hpc_ih = hpc_ih.cuda()
        hpc_hh = hpc_hh.cuda()
    # Warm-up forward pass before timing.
    ori_out, ori_state = ori_lstm.forward(ori_x, (ori_h0, ori_c0))
    if use_cuda:
        torch.cuda.synchronize()
    # Reuse the torch LSTM's layer-0 parameters for the hpc path.
    hpc_wih0 = ori_lstm.weight_ih_l0
    hpc_whh0 = ori_lstm.weight_hh_l0
    hpc_bih0 = ori_lstm.bias_ih_l0
    hpc_bhh0 = ori_lstm.bias_hh_l0
    hpc_bias = hpc_bih0 + hpc_bhh0
    # Timing loop: torch reference.
    t = time.time()
    for i in range(times):
        ori_out, ori_state = ori_lstm.forward(ori_x, (ori_h0, ori_c0))
    if use_cuda:
        torch.cuda.synchronize()
    print('original lstm activation cost time: {}'.format(time.time() - t))
    # Timing loop: matmul projections + hpc gate-activation kernel.
    t = time.time()
    for i in range(times):
        torch.matmul(hpc_x[0].detach(), hpc_wih0.transpose(0, 1).detach(), out = hpc_ih)
        torch.matmul(hpc_h0[0].detach(), hpc_whh0.transpose(0, 1).detach(), out = hpc_hh)
        hpc_models.actor_critic_lstm_activation([hpc_ih, hpc_hh, hpc_bias], [hpc_h0, hpc_c0])
    hpc_out = hpc_h0
    if use_cuda:
        torch.cuda.synchronize()
    print('hpc lstm activation cost time: {}'.format(time.time() - t))
def actor_critic_pre_sample_val():
    """Validate hpc_models.actor_critic_pre_sample against the torch logits."""
    ori_x = torch.randn(lstm_seq_len, lstm_batch_size, lstm_hidden_size)
    ori_key = torch.randn(lstm_batch_size, max_entity_num, lstm_hidden_size)
    ori_mask = torch.zeros(lstm_batch_size, max_entity_num)  # NOTE(review): overwritten by the list below
    ori_entity_num = torch.randint(max_entity_num - 2, max_entity_num, size=(lstm_batch_size, ))
    ori_sample_entity = []
    ori_mask = []
    # Build per-row masks: 1 for valid entities (< entity_num), 0 for padding.
    for i in range(lstm_batch_size):
        entity_num = ori_entity_num[i]
        sample_entity = torch.randint(0, entity_num, size=(1, ))  # NOTE(review): unused
        ori_sample_entity.append(torch.randint(0, entity_num, size=(1, )))
        mask = []
        for j in range(max_entity_num):
            if j < entity_num:
                mask.append(torch.ones(size=(1, )))
            else:
                mask.append(torch.zeros(size=(1, )))
        mask = torch.stack(mask, dim=0).squeeze(1)
        ori_mask.append(mask)
    ori_sample_entity = torch.stack(ori_sample_entity, dim=0).squeeze(1)
    ori_mask = torch.stack(ori_mask, dim=0)
    # Also mask out the already-sampled entity in each row.
    ori_mask[torch.arange(lstm_batch_size), ori_sample_entity] = 0
    hpc_x = ori_x.clone().detach()
    hpc_key = ori_key.clone().detach()
    hpc_mask = ori_mask.clone().detach()
    hpc_out = torch.zeros(lstm_batch_size, max_entity_num)  # kernel output buffer
    if use_cuda:
        ori_x = ori_x.cuda()
        ori_key = ori_key.cuda()
        ori_mask = ori_mask.cuda()
        hpc_x = hpc_x.cuda()
        hpc_key = hpc_key.cuda()
        hpc_mask = hpc_mask.cuda()
        hpc_out = hpc_out.cuda()
    ori_mask = ori_mask.bool()
    hpc_mask = hpc_mask.bool()
    # Reference: dot-product logits, masked with -1e9, then divided by 0.8.
    ori_queries = ori_x.permute(1, 0, 2)
    ori_query_result = ori_queries * ori_key
    ori_step_logits = ori_query_result.sum(dim=2)
    ori_step_logits = ori_step_logits.masked_fill(~ori_mask, -1e9)
    ori_step_logits = ori_step_logits.div(0.8)
    ori_out = ori_step_logits
    hpc_models.actor_critic_pre_sample([hpc_key, hpc_x, hpc_mask], [hpc_out])
    if use_cuda:
        torch.cuda.synchronize()
    mre = mean_relative_error(torch.flatten(ori_out).cpu().detach().numpy(), torch.flatten(hpc_out).cpu().detach().numpy())
    print("actor critic pre sample mean_relative_error: " + str(mre))
    assert np.allclose(ori_out.detach().cpu().numpy(), hpc_out.detach().cpu().numpy(), rtol=1e-5, atol=1e-5)
def actor_critic_pre_sample_perf():
    """Time the torch pre-sample logits vs hpc_models.actor_critic_pre_sample."""
    ori_x = torch.randn(lstm_seq_len, lstm_batch_size, lstm_hidden_size)
    ori_key = torch.randn(lstm_batch_size, max_entity_num, lstm_hidden_size)
    ori_mask = torch.zeros(lstm_batch_size, max_entity_num)  # NOTE(review): overwritten by the list below
    ori_entity_num = torch.randint(max_entity_num - 2, max_entity_num, size=(lstm_batch_size, ))
    ori_sample_entity = []
    ori_mask = []
    # Build per-row masks: 1 for valid entities (< entity_num), 0 for padding.
    for i in range(lstm_batch_size):
        entity_num = ori_entity_num[i]
        sample_entity = torch.randint(0, entity_num, size=(1, ))  # NOTE(review): unused
        ori_sample_entity.append(torch.randint(0, entity_num, size=(1, )))
        mask = []
        for j in range(max_entity_num):
            if j < entity_num:
                mask.append(torch.ones(size=(1, )))
            else:
                mask.append(torch.zeros(size=(1, )))
        mask = torch.stack(mask, dim=0).squeeze(1)
        ori_mask.append(mask)
    ori_sample_entity = torch.stack(ori_sample_entity, dim=0).squeeze(1)
    ori_mask = torch.stack(ori_mask, dim=0)
    # Also mask out the already-sampled entity in each row.
    ori_mask[torch.arange(lstm_batch_size), ori_sample_entity] = 0
    hpc_x = ori_x.clone().detach()
    hpc_key = ori_key.clone().detach()
    hpc_mask = ori_mask.clone().detach()
    hpc_out = torch.zeros(lstm_batch_size, max_entity_num)  # kernel output buffer
    if use_cuda:
        ori_x = ori_x.cuda()
        ori_key = ori_key.cuda()
        ori_mask = ori_mask.cuda()
        hpc_x = hpc_x.cuda()
        hpc_key = hpc_key.cuda()
        hpc_mask = hpc_mask.cuda()
        hpc_out = hpc_out.cuda()
    ori_mask = ori_mask.bool()
    hpc_mask = hpc_mask.bool()
    # Timing loop: torch reference.
    t = time.time()
    for i in range(times):
        ori_queries = ori_x.permute(1, 0, 2)
        ori_query_result = ori_queries * ori_key
        ori_step_logits = ori_query_result.sum(dim=2)
        ori_step_logits = ori_step_logits.masked_fill(~ori_mask, -1e9)
        ori_step_logits = ori_step_logits.div(0.8)
        ori_out = ori_step_logits
    if use_cuda:
        torch.cuda.synchronize()
    print('original pre sample cost time: {}'.format(time.time() - t))
    # Timing loop: hpc kernel.
    t = time.time()
    for i in range(times):
        hpc_models.actor_critic_pre_sample([hpc_key, hpc_x, hpc_mask], [hpc_out])
    if use_cuda:
        torch.cuda.synchronize()
    print('hpc pre sample cost time: {}'.format(time.time() - t))
if __name__ == '__main__':
    # Run each validation test followed by its timing benchmark.
    print("target problem: batch_size = {}, max_entity_num = {}, input_dim = {}".format(batch_size, max_entity_num, input_dim))
    print("================run actor critic update ae validation test================")
    actor_critic_update_ae_val()
    print("================run actor critic update ae performance test================")
    actor_critic_update_ae_perf()
    print("\n")
    print("target problem: lstm_batch_size = {}, lstm_seq_len = {}, lstm_input_size = {}, lstm_hidden_size = {}, lstm_num_layers = {}".format(
    lstm_batch_size, lstm_seq_len, lstm_input_size, lstm_hidden_size, lstm_num_layers))
    print("================run actor critic lstm activation validation test================")
    actor_critic_lstm_activation_val()
    print("================run actor critic lstm activation performance test================")
    actor_critic_lstm_activation_perf()
    print("\n")
    print("target problem: lstm_batch_size = {}, lstm_seq_len = {}, lstm_hidden_size = {}, max_entity_num = {}".format(
    lstm_batch_size, lstm_seq_len, lstm_hidden_size, max_entity_num))
    print("================run actor critic pre sample validation test================")
    actor_critic_pre_sample_val()
    print("================run actor critic pre sample performance test================")
    actor_critic_pre_sample_perf()
    print("\n")
| 38.230769
| 142
| 0.672032
| 2,144
| 13,916
| 3.967351
| 0.062966
| 0.067717
| 0.044322
| 0.031977
| 0.913708
| 0.871502
| 0.850811
| 0.843992
| 0.818364
| 0.800846
| 0
| 0.014499
| 0.197111
| 13,916
| 363
| 143
| 38.336088
| 0.7468
| 0.004743
| 0
| 0.816949
| 0
| 0
| 0.079001
| 0
| 0
| 0
| 0
| 0
| 0.00678
| 1
| 0.023729
| false
| 0
| 0.016949
| 0
| 0.044068
| 0.071186
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02819c15865800353cd1620255e29a94921fe835
| 2,925
|
py
|
Python
|
tests/format/vtp/test_write_vtp.py
|
ricosjp/femio
|
f43991132e530c97477374f4bba25a250a6acae8
|
[
"Apache-2.0"
] | 21
|
2020-12-27T11:05:35.000Z
|
2022-03-02T15:37:18.000Z
|
tests/format/vtp/test_write_vtp.py
|
ricosjp/femio
|
f43991132e530c97477374f4bba25a250a6acae8
|
[
"Apache-2.0"
] | null | null | null |
tests/format/vtp/test_write_vtp.py
|
ricosjp/femio
|
f43991132e530c97477374f4bba25a250a6acae8
|
[
"Apache-2.0"
] | 2
|
2021-04-28T09:41:09.000Z
|
2021-07-01T21:18:45.000Z
|
import pathlib
import shutil
import unittest
import numpy as np
from femio.fem_data import FEMData
class TestWriteVTP(unittest.TestCase):
    """Round-trip write/read tests for the VTP (VTK PolyData) writer.

    The two original test methods were byte-for-byte duplicates apart from the
    fixture directory names; the shared logic is factored into a helper.
    """

    def _assert_write_roundtrip(self, read_dir, write_dir):
        """Read a mesh fixture, write it back, and check the round trip.

        Parameters
        ----------
        read_dir: fixture subdirectory under tests/data/vtp to read from.
        write_dir: subdirectory to write to (wiped first if it exists).
        """
        file_name = pathlib.Path('tests/data/vtp') / read_dir / 'mesh.vtp'
        fem_data = FEMData.read_files('vtp', [file_name])
        write_file_name = pathlib.Path('tests/data/vtp') / write_dir / 'mesh.vtp'
        # Start from a clean output directory so stale files cannot mask failures.
        if write_file_name.exists():
            shutil.rmtree(write_file_name.parent)
        fem_data.write('vtp', write_file_name)
        written_fem_data = FEMData.read_files('vtp', [write_file_name])
        np.testing.assert_almost_equal(
            written_fem_data.nodes.data, fem_data.nodes.data)
        # Polygons may have varying vertex counts, so compare element by element.
        for ae, de in zip(
                written_fem_data.elements.data, fem_data.elements.data):
            np.testing.assert_almost_equal(ae, np.array(de))
        # Both elemental and nodal 'p' (scalar) and 'U' (vector) fields must survive.
        for container in ('elemental_data', 'nodal_data'):
            for attribute in ('p', 'U'):
                np.testing.assert_almost_equal(
                    getattr(written_fem_data, container).get_attribute_data(
                        attribute),
                    getattr(fem_data, container).get_attribute_data(attribute))

    def test_write_vtp_closed(self):
        """Round trip for the closed-surface fixture."""
        self._assert_write_roundtrip('closed', 'write_closed')

    def test_write_vtp_polys(self):
        """Round trip for the general-polygon fixture."""
        self._assert_write_roundtrip('polys', 'write_polys')
| 39
| 72
| 0.674872
| 398
| 2,925
| 4.575377
| 0.130653
| 0.119165
| 0.140582
| 0.175728
| 0.901702
| 0.901702
| 0.901702
| 0.901702
| 0.863262
| 0.863262
| 0
| 0
| 0.220171
| 2,925
| 74
| 73
| 39.527027
| 0.798334
| 0
| 0
| 0.793103
| 0
| 0
| 0.056068
| 0.044444
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0.034483
| false
| 0
| 0.086207
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a3467fef4c85ca377134d1476c5e33210eb1f54
| 71
|
py
|
Python
|
more_itertools/__init__.py
|
ethanaward/more-itertools
|
bf63113553a3dfb6a77104669d80c2cf5d082a60
|
[
"MIT"
] | 1
|
2019-04-23T21:50:08.000Z
|
2019-04-23T21:50:08.000Z
|
more_itertools/__init__.py
|
ethanaward/more-itertools
|
bf63113553a3dfb6a77104669d80c2cf5d082a60
|
[
"MIT"
] | null | null | null |
more_itertools/__init__.py
|
ethanaward/more-itertools
|
bf63113553a3dfb6a77104669d80c2cf5d082a60
|
[
"MIT"
] | 2
|
2019-02-14T08:13:33.000Z
|
2019-04-23T21:47:48.000Z
|
# Package initializer: re-export the public API of the ``more`` and
# ``recipes`` submodules so callers can import everything directly from
# the top-level ``more_itertools`` namespace (the documented usage).
from more_itertools.more import *
from more_itertools.recipes import *
| 23.666667
| 36
| 0.830986
| 10
| 71
| 5.7
| 0.5
| 0.280702
| 0.596491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 37
| 35.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a71b73bc1f5e4bbb6875571181188e5b6e50984
| 17,346
|
py
|
Python
|
flash/text/question_answering/data.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 1,457
|
2021-01-28T20:40:16.000Z
|
2022-03-31T06:22:05.000Z
|
flash/text/question_answering/data.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 1,123
|
2021-01-28T20:37:56.000Z
|
2022-03-31T19:34:44.000Z
|
flash/text/question_answering/data.py
|
Actis92/lightning-flash
|
49972268cfc0f95f1bd2b8fbf25036970cc44b59
|
[
"Apache-2.0"
] | 170
|
2021-01-29T00:41:39.000Z
|
2022-03-29T16:09:52.000Z
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional, Type, Union
from flash.core.data.data_module import DataModule
from flash.core.data.data_pipeline import DataPipelineState
from flash.core.data.io.input import Input
from flash.core.data.utilities.paths import PATH_TYPE
from flash.core.utilities.stages import RunningStage
from flash.core.utilities.types import INPUT_TRANSFORM_TYPE
from flash.text.question_answering.input import (
QuestionAnsweringCSVInput,
QuestionAnsweringDictionaryInput,
QuestionAnsweringJSONInput,
QuestionAnsweringSQuADInput,
)
from flash.text.question_answering.input_transform import QuestionAnsweringInputTransform
from flash.text.question_answering.output_transform import QuestionAnsweringOutputTransform
class QuestionAnsweringData(DataModule):
    """Data module for QuestionAnswering task."""

    input_transform_cls = QuestionAnsweringInputTransform
    output_transform_cls = QuestionAnsweringOutputTransform

    @classmethod
    def _dataset_kwargs(
        cls,
        transform_kwargs: Optional[Dict],
        max_source_length: int,
        max_target_length: int,
        padding: Union[str, bool],
        question_column_name: str,
        context_column_name: str,
        answer_column_name: str,
        doc_stride: int,
    ) -> Dict[str, Any]:
        """Assemble the keyword arguments shared by every ``Input`` built by the
        ``from_*`` constructors.

        Kept in one place so the four public constructors cannot drift apart.
        """
        return dict(
            max_source_length=max_source_length,
            max_target_length=max_target_length,
            padding=padding,
            question_column_name=question_column_name,
            context_column_name=context_column_name,
            answer_column_name=answer_column_name,
            doc_stride=doc_stride,
            data_pipeline_state=DataPipelineState(),
            transform_kwargs=transform_kwargs,
            input_transforms_registry=cls.input_transforms_registry,
        )

    @classmethod
    def _from_inputs(
        cls,
        input_cls: Type[Input],
        data: tuple,
        transforms: tuple,
        ds_kw: Dict[str, Any],
        data_module_kwargs: Dict[str, Any],
    ) -> "QuestionAnsweringData":
        """Build one ``Input`` per running stage and wrap them in a data module.

        Args:
            input_cls: The ``Input`` subclass to instantiate for each stage.
            data: ``(train, val, test, predict)`` data sources.
            transforms: ``(train, val, test, predict)`` input transforms.
            ds_kw: Shared keyword arguments from :meth:`_dataset_kwargs`.
            data_module_kwargs: Extra keyword arguments forwarded to ``cls``.

        Returns:
            The constructed data module.
        """
        stages = (
            RunningStage.TRAINING,
            RunningStage.VALIDATING,
            RunningStage.TESTING,
            RunningStage.PREDICTING,
        )
        return cls(
            *(
                input_cls(stage, stage_data, transform=stage_transform, **ds_kw)
                for stage, stage_data, stage_transform in zip(stages, data, transforms)
            ),
            **data_module_kwargs,
        )

    @classmethod
    def from_csv(
        cls,
        train_file: Optional[PATH_TYPE] = None,
        val_file: Optional[PATH_TYPE] = None,
        test_file: Optional[PATH_TYPE] = None,
        predict_file: Optional[PATH_TYPE] = None,
        train_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        input_cls: Type[Input] = QuestionAnsweringCSVInput,
        transform_kwargs: Optional[Dict] = None,
        max_source_length: int = 384,
        max_target_length: int = 30,
        padding: Union[str, bool] = "max_length",
        question_column_name: str = "question",
        context_column_name: str = "context",
        answer_column_name: str = "answer",
        doc_stride: int = 128,
        **data_module_kwargs: Any,
    ) -> "QuestionAnsweringData":
        """Creates a :class:`~flash.text.question_answering.data.QuestionAnsweringData` object from the given CSV
        files.

        Args:
            train_file: The CSV file containing the training data.
            val_file: The CSV file containing the validation data.
            test_file: The CSV file containing the testing data.
            predict_file: The CSV file containing the data to use when predicting.
            train_transform: The dictionary of transforms to use during training which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            val_transform: The dictionary of transforms to use during validation which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            test_transform: The dictionary of transforms to use during testing which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            predict_transform: The dictionary of transforms to use during predicting which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            max_source_length: Max length of the sequence to be considered during tokenization.
            max_target_length: Max length of each answer to be produced.
            padding: Padding type during tokenization.
            question_column_name: The key in the JSON file to recognize the question field.
            context_column_name: The key in the JSON file to recognize the context field.
            answer_column_name: The key in the JSON file to recognize the answer field.
            doc_stride: The stride amount to be taken when splitting up a long document into chunks.

        Returns:
            The constructed data module.
        """
        ds_kw = cls._dataset_kwargs(
            transform_kwargs,
            max_source_length,
            max_target_length,
            padding,
            question_column_name,
            context_column_name,
            answer_column_name,
            doc_stride,
        )
        return cls._from_inputs(
            input_cls,
            (train_file, val_file, test_file, predict_file),
            (train_transform, val_transform, test_transform, predict_transform),
            ds_kw,
            data_module_kwargs,
        )

    @classmethod
    def from_json(
        cls,
        train_file: Optional[PATH_TYPE] = None,
        val_file: Optional[PATH_TYPE] = None,
        test_file: Optional[PATH_TYPE] = None,
        predict_file: Optional[PATH_TYPE] = None,
        train_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        input_cls: Type[Input] = QuestionAnsweringJSONInput,
        transform_kwargs: Optional[Dict] = None,
        field: Optional[str] = None,
        max_source_length: int = 384,
        max_target_length: int = 30,
        padding: Union[str, bool] = "max_length",
        question_column_name: str = "question",
        context_column_name: str = "context",
        answer_column_name: str = "answer",
        doc_stride: int = 128,
        **data_module_kwargs: Any,
    ) -> "QuestionAnsweringData":
        """Creates a :class:`~flash.text.question_answering.data.QuestionAnsweringData` object from the given JSON
        files.

        Args:
            train_file: The JSON file containing the training data.
            val_file: The JSON file containing the validation data.
            test_file: The JSON file containing the testing data.
            predict_file: The JSON file containing the data to use when predicting.
            train_transform: The dictionary of transforms to use during training which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            val_transform: The dictionary of transforms to use during validation which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            test_transform: The dictionary of transforms to use during testing which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            predict_transform: The dictionary of transforms to use during predicting which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            field: The field that holds the data in the JSON file.
            max_source_length: Max length of the sequence to be considered during tokenization.
            max_target_length: Max length of each answer to be produced.
            padding: Padding type during tokenization.
            question_column_name: The key in the JSON file to recognize the question field.
            context_column_name: The key in the JSON file to recognize the context field.
            answer_column_name: The key in the JSON file to recognize the answer field.
            doc_stride: The stride amount to be taken when splitting up a long document into chunks.

        Returns:
            The constructed data module.
        """
        ds_kw = cls._dataset_kwargs(
            transform_kwargs,
            max_source_length,
            max_target_length,
            padding,
            question_column_name,
            context_column_name,
            answer_column_name,
            doc_stride,
        )
        # Only the JSON loader needs to know which top-level field holds the data.
        ds_kw["field"] = field
        return cls._from_inputs(
            input_cls,
            (train_file, val_file, test_file, predict_file),
            (train_transform, val_transform, test_transform, predict_transform),
            ds_kw,
            data_module_kwargs,
        )

    @classmethod
    def from_squad_v2(
        cls,
        train_file: Optional[str] = None,
        val_file: Optional[str] = None,
        test_file: Optional[str] = None,
        predict_file: Optional[str] = None,
        train_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        input_cls: Type[Input] = QuestionAnsweringSQuADInput,
        transform_kwargs: Optional[Dict] = None,
        max_source_length: int = 384,
        max_target_length: int = 30,
        padding: Union[str, bool] = "max_length",
        question_column_name: str = "question",
        context_column_name: str = "context",
        answer_column_name: str = "answer",
        doc_stride: int = 128,
        **data_module_kwargs: Any,
    ) -> "QuestionAnsweringData":
        """Creates a :class:`~flash.text.question_answering.data.QuestionAnsweringData` object from the given data
        JSON files in the SQuAD2.0 format.

        Args:
            train_file: The JSON file containing the training data.
            val_file: The JSON file containing the validation data.
            test_file: The JSON file containing the testing data.
            predict_file: The JSON file containing the predict data.
            train_transform: The dictionary of transforms to use during training which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            val_transform: The dictionary of transforms to use during validation which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            test_transform: The dictionary of transforms to use during testing which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            predict_transform: The dictionary of transforms to use during predicting which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            max_source_length: Max length of the sequence to be considered during tokenization.
            max_target_length: Max length of each answer to be produced.
            padding: Padding type during tokenization.
            question_column_name: The key in the JSON file to recognize the question field.
            context_column_name: The key in the JSON file to recognize the context field.
            answer_column_name: The key in the JSON file to recognize the answer field.
            doc_stride: The stride amount to be taken when splitting up a long document into chunks.

        Returns:
            The constructed data module.
        """
        ds_kw = cls._dataset_kwargs(
            transform_kwargs,
            max_source_length,
            max_target_length,
            padding,
            question_column_name,
            context_column_name,
            answer_column_name,
            doc_stride,
        )
        return cls._from_inputs(
            input_cls,
            (train_file, val_file, test_file, predict_file),
            (train_transform, val_transform, test_transform, predict_transform),
            ds_kw,
            data_module_kwargs,
        )

    @classmethod
    def from_dicts(
        cls,
        train_data: Optional[Dict[str, Any]] = None,
        val_data: Optional[Dict[str, Any]] = None,
        test_data: Optional[Dict[str, Any]] = None,
        predict_data: Optional[Dict[str, Any]] = None,
        train_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = QuestionAnsweringInputTransform,
        input_cls: Type[Input] = QuestionAnsweringDictionaryInput,
        transform_kwargs: Optional[Dict] = None,
        max_source_length: int = 384,
        max_target_length: int = 30,
        padding: Union[str, bool] = "max_length",
        question_column_name: str = "question",
        context_column_name: str = "context",
        answer_column_name: str = "answer",
        doc_stride: int = 128,
        **data_module_kwargs: Any,
    ) -> "QuestionAnsweringData":
        """Creates a :class:`~flash.text.question_answering.data.QuestionAnsweringData` object from the given data
        dictionaries.

        Args:
            train_data: The dictionary containing the training data.
            val_data: The dictionary containing the validation data.
            test_data: The dictionary containing the testing data.
            predict_data: The dictionary containing the data to use when predicting.
            train_transform: The dictionary of transforms to use during training which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            val_transform: The dictionary of transforms to use during validation which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            test_transform: The dictionary of transforms to use during testing which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            predict_transform: The dictionary of transforms to use during predicting which maps
                :class:`~flash.core.data.io.input_transform.InputTransform` hook names to callable transforms.
            max_source_length: Max length of the sequence to be considered during tokenization.
            max_target_length: Max length of each answer to be produced.
            padding: Padding type during tokenization.
            question_column_name: The key in the JSON file to recognize the question field.
            context_column_name: The key in the JSON file to recognize the context field.
            answer_column_name: The key in the JSON file to recognize the answer field.
            doc_stride: The stride amount to be taken when splitting up a long document into chunks.

        Returns:
            The constructed data module.
        """
        ds_kw = cls._dataset_kwargs(
            transform_kwargs,
            max_source_length,
            max_target_length,
            padding,
            question_column_name,
            context_column_name,
            answer_column_name,
            doc_stride,
        )
        return cls._from_inputs(
            input_cls,
            (train_data, val_data, test_data, predict_data),
            (train_transform, val_transform, test_transform, predict_transform),
            ds_kw,
            data_module_kwargs,
        )
| 53.537037
| 114
| 0.695665
| 2,022
| 17,346
| 5.746785
| 0.089515
| 0.041308
| 0.01988
| 0.021945
| 0.871429
| 0.845611
| 0.829346
| 0.829346
| 0.814888
| 0.814888
| 0
| 0.002962
| 0.240978
| 17,346
| 323
| 115
| 53.702786
| 0.879614
| 0.43203
| 0
| 0.73913
| 0
| 0
| 0.023024
| 0.009298
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.054348
| 0
| 0.11413
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce4e8278cb78fc3e145561a63d3c1758017bafb3
| 98,147
|
py
|
Python
|
iclientpy/test/rest/api/test_restdata.py
|
SuperMap/iClientPython
|
5e741c1efda9b12d321909428bd5c95e22482ea7
|
[
"Apache-2.0"
] | 28
|
2018-04-19T07:11:34.000Z
|
2022-02-24T08:31:08.000Z
|
iclientpy/test/rest/api/test_restdata.py
|
SuperMap/iClientPython
|
5e741c1efda9b12d321909428bd5c95e22482ea7
|
[
"Apache-2.0"
] | 3
|
2019-05-06T07:58:45.000Z
|
2020-11-18T21:16:38.000Z
|
iclientpy/test/rest/api/test_restdata.py
|
SuperMap/iClientPython
|
5e741c1efda9b12d321909428bd5c95e22482ea7
|
[
"Apache-2.0"
] | 8
|
2018-06-14T02:29:53.000Z
|
2020-11-03T01:36:35.000Z
|
import httpretty
from typing import List
from iclientpy.rest.api.model import Feature
from iclientpy.dtojson import deserializer
from iclientpy.rest.decorator import HttpMethod
from iclientpy.rest.api.model import *
from iclientpy.rest.apifactory import APIFactory
from iclientpy.rest.api.restdata import DataService
from .abstractrest import AbstractRESTTestCase
from unittest import mock
class RESTDataTest(AbstractRESTTestCase):
@classmethod
def setUpClass(cls):
cls.init_rest(cls)
cls.init_apifactory(cls)
cls.init_api(cls, "data_service", "data-World/rest")
@mock.patch('builtins.open', mock.mock_open(read_data='1'))
def test_dataservice(self):
jsonstr = '{"datasourceNames":["World"],"childUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/name/World"],"datasourceCount":1}'
self.check_api(DataService.get_datasources,
self.baseuri + "/services/data-World/rest/data/datasources.json", HttpMethod.GET,
httpretty.Response(status=200, body=jsonstr))
jsonstr = '{"datasourceInfo":{"distanceUnit":"METER","coordUnit":"DEGREE","name":"World","description":"testtest","engineType":"UDB","prjCoordSys":{"distanceUnit":"METER","projectionParam":null,"epsgCode":4326,"coordUnit":"DEGREE","name":"Longitude / Latitude Coordinate System---GCS_WGS_1984","projection":null,"type":"PCS_EARTH_LONGITUDE_LATITUDE","coordSystem":{"datum":{"name":"D_WGS_1984","type":"DATUM_WGS_1984","spheroid":{"flatten":0.00335281066474748,"name":"WGS_1984","axis":6378137,"type":"SPHEROID_WGS_1984"}},"unit":"DEGREE","spatialRefType":"SPATIALREF_EARTH_LONGITUDE_LATITUDE","name":"GCS_WGS_1984","type":"GCS_WGS_1984","primeMeridian":{"longitudeValue":0,"name":"Greenwich","type":"PRIMEMERIDIAN_GREENWICH"}}}},"childUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets"]}'
self.check_api(DataService.get_datasource,
self.baseuri + "/services/data-World/rest/data/datasources/World.json", HttpMethod.GET,
httpretty.Response(status=200, body=jsonstr), datasource='World')
item = PutDatasourceItem()
item.description = 'testtest'
self.check_api(DataService.put_datasource,
self.baseuri + "/services/data-World/rest/data/datasources/World.json", HttpMethod.PUT,
httpretty.Response(status=200,
body='{"postResultType":"CreateChild","newResourceID":"/test13","succeed":true,"newResourceLocation":"http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/test13.json"}'),
datasource='World', entity=item)
jsonstr = '{"datasetCount":46,"datasetNames":["LandCover","WorldEarth","worldimage","Night","LandCover_1","LandCover_2","LandCover_3","Ocean","Ocean_Label","Country_Label","Continent_Label","Lakes","Rivers","Grids","continent_T","Ocean_Label_1","Countries","Capitals","OceanLabel","CountryLabel1","CountryLabel","continent_T_1","Ocean_Label_1_1","Capitals_1","Countries_1","Rivers_1","Lakes_1","Ocean_Label_2","continent_T_2","Ocean_Label_1_2","Capitals_2","Countries_2","Rivers_2","Lakes_2","Ocean_Label_3","continent_T_3","Ocean_Label_1_3","Capitals_3","Countries_3","Rivers_3","Lakes_3","Ocean_Label_4","LandCover2","test","test2","test13"],"childUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/WorldEarth","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/worldimage","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Night","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Country_Label","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Continent_Label","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Lakes","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/
datasets/Rivers","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Grids","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Countries","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Capitals","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/OceanLabel","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/CountryLabel1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/CountryLabel","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_1_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Capitals_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Countries_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Rivers_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Lakes_1","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_1_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Capitals_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources
/World/datasets/Countries_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Rivers_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Lakes_2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_1_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Capitals_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Countries_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Rivers_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Lakes_3","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/Ocean_Label_4","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/test","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/test2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/test13"]}'
self.check_api(DataService.get_datasets,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets.json", HttpMethod.GET,
httpretty.Response(status=200, body=jsonstr), datasource='World')
entity = CopyDatasetItem()
entity.srcDatasourceName = 'World'
entity.srcDatasetName = 'continent_T_1'
entity.destDatasetName = 'test'
self.check_api(DataService.copy_dataset,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets.json", HttpMethod.POST,
httpretty.Response(status=200, body='{"succeed": true}'), datasource='World', entity=entity)
entity = CreateDatasetItem()
entity.datasetName = 'test2'
entity.datasetType = DatasetType.LINE
entity.isFileCache = False
self.check_api(DataService.create_dataset,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/test2.json",
HttpMethod.PUT,
httpretty.Response(status=200, body='{"succeed": true}'), datasource='World', dataset='test2',
entity=entity)
jsonstr = '{"childUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover/fields","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover/features","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/LandCover/domain"],"supportAttachments":false,"supportFeatureMetadatas":false,"datasetInfo":{"pixelFormat":"BIT32","maxValue":13,"description":"","type":"GRID","blockSize":256,"dataSourceName":"World","tableName":"LandCover","noValue":-9999,"minValue":0,"isReadOnly":false,"encodeType":"SGL","width":5760,"bounds":{"top":90,"left":-180,"bottom":-90,"leftBottom":{"x":-180,"y":-90},"right":180,"rightTop":{"x":180,"y":90}},"name":"LandCover","prjCoordSys":{"distanceUnit":"METER","projectionParam":null,"epsgCode":4326,"coordUnit":"DEGREE","name":"Longitude / Latitude Coordinate System---GCS_WGS_1984","projection":null,"type":"PCS_EARTH_LONGITUDE_LATITUDE","coordSystem":{"datum":{"name":"D_WGS_1984","type":"DATUM_WGS_1984","spheroid":{"flatten":0.00335281066474748,"name":"WGS_1984","axis":6378137,"type":"SPHEROID_WGS_1984"}},"unit":"DEGREE","spatialRefType":"SPATIALREF_EARTH_LONGITUDE_LATITUDE","name":"GCS_WGS_1984","type":"GCS_WGS_1984","primeMeridian":{"longitudeValue":0,"name":"Greenwich","type":"PRIMEMERIDIAN_GREENWICH"}}},"datasourceConnectionInfo":null,"height":2880}}'
self.check_api(DataService.get_dataset,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/test2.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasource='World',
dataset='test2')
entity = PutDatasetItem()
entity.description = "blablablabla"
self.check_api(DataService.put_dataset,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/test2.json",
HttpMethod.PUT, httpretty.Response(status=200, body=jsonstr), datasource='World',
dataset='test2', entity=entity)#TODO 这个地方疑似存在问题
self.check_api(DataService.delete_dataset,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/test2.json",
HttpMethod.DELETE, httpretty.Response(status=200, body='{"succeed": true}'), datasource='World',
dataset='test2')
jsonstr = '[{"fieldNames":["SMID","SMSDRIW","SMSDRIN","SMSDRIE","SMSDRIS","SMUSERID","SMAREA","SMPERIMETER","SMGEOMETRYSIZE","SQKM","SQMI","COLOR_MAP","CAPITAL","COUNTRY","POP_1994","CONTINENT"],"fieldValues":["22","-7.433472633361816","62.35749816894531","-6.38972282409668","61.388328552246094","6","0.25430895154659083","5.743731026651685","4500","1474.69","569.38","5","示例首都a","示例国家a","47067.0","亚洲"],"geometry":{"id":22,"parts":[3],"points":[{"x":-40,"y":60},{"x":-45,"y":62},{"x":-40,"y":55},{"x":-40,"y":60}],"style":null,"type":"REGION"}},{"fieldNames":["SMID","SMSDRIW","SMSDRIN","SMSDRIE","SMSDRIS","SMUSERID","SMAREA","SMPERIMETER","SMGEOMETRYSIZE","SQKM","SQMI","COLOR_MAP","CAPITAL","COUNTRY","POP_1994","CONTINENT"],"fieldValues":["23","-7.433472633361816","62.35749816894531","-6.38972282409668","61.388328552246094","6","0.25430895154659083","5.743731026651685","4500","1474.69","569.38","5","示例首都b","示例国家b","47067.0","亚洲"],"geometry":{"id":23,"parts":[3],"points":[{"x":-40,"y":60},{"x":-45,"y":62},{"x":-40,"y":55},{"x":-40,"y":60}],"style":null,"type":"REGION","prjCoordSys":null}}]'
features = deserializer(List[Feature])(jsonstr)
self.check_api(DataService.post_features,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/Countries/features.json",
HttpMethod.POST, httpretty.Response(status=200, body='{"succeed": true}'),
datasourceName='World', datasetName='Countries', entity=features, isUseBatch=True)
self.check_api(DataService.get_features,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/Countries/features.json",
HttpMethod.GET, httpretty.Response(status=200,
body='{"startIndex": 0,"childUriList": ["http://192.168.20.158:8090/iserver/services/data-World/rest/data/feature/0-13-0", "http://192.168.20.158:8090/iserver/services/data-World/rest/data/feature/0-13-1"], "geometryType":"REGION","featureCount": 247}'),
datasourceName='World', datasetName='Countries', fromIndex=3, toIndex=5)
jsonstr = '{"fieldNames":["SMID","SMSDRIW","SMSDRIN","SMSDRIE","SMSDRIS","SMUSERID","SMGEOMETRYSIZE"],"geometry":{"texts":["北美洲"],"center":{"x":-102.51862991556618,"y":49.37313255456132},"parts":null,"style":null,"prjCoordSys":null,"textStyle":{"italicAngle":0,"shadow":false,"sizeFixed":false,"underline":false,"rotation":0,"backOpaque":false,"bold":false,"align":"MIDDLECENTER","foreColor":{"red":127,"green":127,"blue":127,"alpha":0},"italic":false,"strikeout":false,"fontName":"文泉驿微米黑","outline":true,"borderSpacingWidth":4,"backColor":{"red":255,"green":255,"blue":255,"alpha":0},"outlineWidth":1,"fontHeight":7.604914623695876,"fontWidth":0,"opaqueRate":100,"stringAlignment":"LEFT","fontScale":1,"fontWeight":0},"id":1,"type":"TEXT","rotations":[0],"partTopo":null,"points":[{"x":-102.51862991556618,"y":49.37313255456132}]},"fieldValues":["1","-114.38229","53.17559","-90.65496","45.570675","0","107"],"ID":1}'
self.check_api(DataService.get_feature,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr),
datasourceName='World', datasetName='continent_T', featureId='1')
entity = PutFeatureItem()
entity.geometry = Geometry()
entity.fieldNames = []
entity.fieldValues = []
self.check_api(DataService.put_feature,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1.json",
HttpMethod.PUT, httpretty.Response(status=200, body='{"succeed": true}'),
datasourceName='World', datasetName='continent_T', featureId='1', entity='entity')
self.check_api(DataService.delete_feature,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1.json",
HttpMethod.DELETE, httpretty.Response(status=200, body='{"succeed": true}'),
datasourceName='World', datasetName='continent_T', featureId='1')
self.check_api(DataService.get_attachments,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1/attachments.json",
HttpMethod.GET, httpretty.Response(status=200,
body='[{"size":28002,"name":"World.bru","id":1,"contentType":"application/octet-stream"}]'),
datasourceName='World', datasetName='continent_T', featureId='1')
with open('./World.zip', 'rb') as fileb:
self.check_api(DataService.post_attachments,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1/attachments.json",
HttpMethod.POST, httpretty.Response(status=200,
body='[{"size":28002,"name":"World.bru","id":1,"contentType":"application/octet-stream"}]'),
datasourceName='World', datasetName='continent_T', featureId='1', file=fileb)
self.check_api(DataService.get_metadata,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/features/1/metadata.json",
HttpMethod.GET, httpretty.Response(status=200,
body='{"createTime":1436945830474,"createUser":"admin","lastEditTime":1436945830474,"lastEditUser":"admin"}'),
datasourceName='World', datasetName='continent_T', featureId='1')
jsonstr = '{"fieldNames":["SMID","SMSDRIW","SMSDRIN","SMSDRIE","SMSDRIS","SMUSERID","SMGEOMETRYSIZE"],"geometry":{"texts":["北美洲"],"center":{"x":-102.51862991556618,"y":49.37313255456132},"parts":null,"style":null,"prjCoordSys":null,"textStyle":{"italicAngle":0,"shadow":false,"sizeFixed":false,"underline":false,"rotation":0,"backOpaque":false,"bold":false,"align":"MIDDLECENTER","foreColor":{"red":127,"green":127,"blue":127,"alpha":0},"italic":false,"strikeout":false,"fontName":"文泉驿微米黑","outline":true,"borderSpacingWidth":4,"backColor":{"red":255,"green":255,"blue":255,"alpha":0},"outlineWidth":1,"fontHeight":7.604914623695876,"fontWidth":0,"opaqueRate":100,"stringAlignment":"LEFT","fontScale":1,"fontWeight":0},"id":1,"type":"TEXT","rotations":[0],"partTopo":null,"points":[{"x":-102.51862991556618,"y":49.37313255456132}]},"fieldValues":["1","-114.38229","53.17559","-90.65496","45.570675","0","107"],"ID":1}'
self.check_api(DataService.get_feature_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0.json',
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0')
entity = PutFeatureItem()
entity.geometry = Geometry()
entity.fieldNames = []
entity.fieldValues = []
self.check_api(DataService.put_feature_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0.json',
HttpMethod.PUT, httpretty.Response(status=200, body='{"succeed": true}'),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0',
entity='entity')
self.check_api(DataService.delete_feature_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0.json',
HttpMethod.DELETE, httpretty.Response(status=200, body='{"succeed": true}'),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0')
self.check_api(DataService.get_attachments_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0/attachments.json',
HttpMethod.GET, httpretty.Response(status=200,
body='[{"size":28002,"name":"World.bru","id":1,"contentType":"application/octet-stream"}]'),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0')
with open('./World.zip', 'rb') as filea:
self.check_api(DataService.post_attachments_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0/attachments.json',
HttpMethod.POST, httpretty.Response(status=200,
body='[{"size":28002,"name":"World.bru","id":1,"contentType":"application/octet-stream"}]'),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0',
file=filea)
self.check_api(DataService.get_metadata_by_url,
'http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0/metadata.json',
HttpMethod.GET, httpretty.Response(status=200,
body='{"createTime":1436945830474,"createUser":"admin","lastEditTime":1436945830474,"lastEditUser":"admin"}'),
feature_url='http://192.168.20.182:8090/iserver/services/data-World/rest/data/feature/0-19-0')
jsonstr = '[{"isRequired":true,"defaultValue":"","name":"SmID","caption":"SmID","type":"INT32","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true},{"isRequired":true,"defaultValue":"0","name":"SmSdriW","caption":"SmSdriW","type":"SINGLE","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true},{"isRequired":true,"defaultValue":"0","name":"SmSdriN","caption":"SmSdriN","type":"SINGLE","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true},{"isRequired":true,"defaultValue":"0","name":"SmSdriE","caption":"SmSdriE","type":"SINGLE","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true},{"isRequired":true,"defaultValue":"0","name":"SmSdriS","caption":"SmSdriS","type":"SINGLE","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true},{"isRequired":true,"defaultValue":"0","name":"SmUserID","caption":"SmUserID","type":"INT32","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":false},{"isRequired":false,"defaultValue":"0","name":"SmGeometrySize","caption":"SmGeometrySize","type":"INT32","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true}]'
self.check_api(DataService.get_fields,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T')
entity = FieldInfo()
entity.type = FieldType.TEXT
entity.name = "test"
entity.caption = "test"
entity.isRequired = True
entity.defaultValue = 'default'
entity.maxLength = 20
entity.isZeroLengthAllowed = True
entity.isSystemField = False
self.check_api(DataService.post_fields,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields.json",
HttpMethod.POST, httpretty.Response(status=200, body='{"succeed": true}'),
datasourceName='World',
datasetName='continent_T', entity=entity)
jsonstr = '{"childUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/MAX","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/MIN","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/AVERAGE","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/STDDEVIATION","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/SUM","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/SmID/VARIANCE"],"fieldInfo":{"isRequired":true,"defaultValue":"","name":"SmID","caption":"SmID","type":"INT32","maxLength":4,"isZeroLengthAllowed":true,"isSystemField":true}}'
self.check_api(DataService.get_field,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/smid.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', field='smid')
self.check_api(DataService.put_field,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/smid.json",
HttpMethod.PUT, httpretty.Response(status=200, body='{"succeed": true}'), datasourceName='World',
datasetName='continent_T', field='smid', entity=entity)
self.check_api(DataService.delete_field,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/smid.json",
HttpMethod.DELETE, httpretty.Response(status=200, body='{"succeed": true}'),
datasourceName='World', datasetName='continent_T', field='smid')
self.check_api(DataService.get_statistic,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/fields/smid/MAX.json",
HttpMethod.GET, httpretty.Response(status=200, body='{"result":7,"mode":"MAX"}'),
datasourceName='World', datasetName='continent_T', field='smid',
statisticMode=StatisticMode.MAX.value)
jsonstr = '{"codeCount":11,"codeInfos":[{"value":"城市","valueType":"TEXT"},{"value":"旱地","valueType":"TEXT"},{"value":"水浇地","valueType":"TEXT"},{"value":"水田","valueType":"TEXT"},{"value":"沙漠","valueType":"TEXT"},{"value":"沼泽","valueType":"TEXT"},{"value":"湖泊水库","valueType":"TEXT"},{"value":"灌丛","valueType":"TEXT"},{"value":"用材林","valueType":"TEXT"},{"value":"经济林","valueType":"TEXT"},{"value":"草地","valueType":"TEXT"}],"description":"","fieldName":"LANDTYPE","name":"LANDTYPE","type":"CODE","valueType":"TEXT"},{"description":"","fieldName":"Area","name":"Area","rangeCount":1,"rangeInfos":[{"max":"999999.0","min":"0.0","type":"CLOSE_OPEN","valueType":"SINGLE"}],"type":"RANGE","valueType":"SINGLE"}]'
self.check_api(DataService.get_domain,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/domain.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T')
jsonstr = '{"valuesCount":256,"values":[[{"column":4678,"row":869,"value":10,"centerPoint":{"x":112.40625,"y":35.65625}},{"column":4679,"row":869,"value":10,"centerPoint":{"x":112.46875,"y":35.65625}},{"column":4680,"row":869,"value":10,"centerPoint":{"x":112.53125,"y":35.65625}},{"column":4681,"row":869,"value":10,"centerPoint":{"x":112.59375,"y":35.65625}},{"column":4682,"row":869,"value":10,"centerPoint":{"x":112.65625,"y":35.65625}},{"column":4683,"row":869,"value":10,"centerPoint":{"x":112.71875,"y":35.65625}},{"column":4684,"row":869,"value":11,"centerPoint":{"x":112.78125,"y":35.65625}},{"column":4685,"row":869,"value":11,"centerPoint":{"x":112.84375,"y":35.65625}},{"column":4686,"row":869,"value":11,"centerPoint":{"x":112.90625,"y":35.65625}},{"column":4687,"row":869,"value":11,"centerPoint":{"x":112.96875,"y":35.65625}},{"column":4688,"row":869,"value":11,"centerPoint":{"x":113.03125,"y":35.65625}},{"column":4689,"row":869,"value":11,"centerPoint":{"x":113.09375,"y":35.65625}},{"column":4690,"row":869,"value":11,"centerPoint":{"x":113.15625,"y":35.65625}},{"column":4691,"row":869,"value":1,"centerPoint":{"x":113.21875,"y":35.65625}},{"column":4692,"row":869,"value":1,"centerPoint":{"x":113.28125,"y":35.65625}},{"column":4693,"row":869,"value":1,"centerPoint":{"x":113.34375,"y":35.65625}}],[{"column":4678,"row":870,"value":10,"centerPoint":{"x":112.40625,"y":35.59375}},{"column":4679,"row":870,"value":10,"centerPoint":{"x":112.46875,"y":35.59375}},{"column":4680,"row":870,"value":10,"centerPoint":{"x":112.53125,"y":35.59375}},{"column":4681,"row":870,"value":10,"centerPoint":{"x":112.59375,"y":35.59375}},{"column":4682,"row":870,"value":10,"centerPoint":{"x":112.65625,"y":35.59375}},{"column":4683,"row":870,"value":11,"centerPoint":{"x":112.71875,"y":35.59375}},{"column":4684,"row":870,"value":11,"centerPoint":{"x":112.78125,"y":35.59375}},{"column":4685,"row":870,"value":11,"centerPoint":{"x":112.84375,"y":35.59375}},{"column":4686,"row":870,"value":11,"cen
terPoint":{"x":112.90625,"y":35.59375}},{"column":4687,"row":870,"value":11,"centerPoint":{"x":112.96875,"y":35.59375}},{"column":4688,"row":870,"value":11,"centerPoint":{"x":113.03125,"y":35.59375}},{"column":4689,"row":870,"value":11,"centerPoint":{"x":113.09375,"y":35.59375}},{"column":4690,"row":870,"value":1,"centerPoint":{"x":113.15625,"y":35.59375}},{"column":4691,"row":870,"value":1,"centerPoint":{"x":113.21875,"y":35.59375}},{"column":4692,"row":870,"value":1,"centerPoint":{"x":113.28125,"y":35.59375}},{"column":4693,"row":870,"value":1,"centerPoint":{"x":113.34375,"y":35.59375}}],[{"column":4678,"row":871,"value":10,"centerPoint":{"x":112.40625,"y":35.53125}},{"column":4679,"row":871,"value":10,"centerPoint":{"x":112.46875,"y":35.53125}},{"column":4680,"row":871,"value":10,"centerPoint":{"x":112.53125,"y":35.53125}},{"column":4681,"row":871,"value":10,"centerPoint":{"x":112.59375,"y":35.53125}},{"column":4682,"row":871,"value":11,"centerPoint":{"x":112.65625,"y":35.53125}},{"column":4683,"row":871,"value":11,"centerPoint":{"x":112.71875,"y":35.53125}},{"column":4684,"row":871,"value":11,"centerPoint":{"x":112.78125,"y":35.53125}},{"column":4685,"row":871,"value":10,"centerPoint":{"x":112.84375,"y":35.53125}},{"column":4686,"row":871,"value":11,"centerPoint":{"x":112.90625,"y":35.53125}},{"column":4687,"row":871,"value":11,"centerPoint":{"x":112.96875,"y":35.53125}},{"column":4688,"row":871,"value":11,"centerPoint":{"x":113.03125,"y":35.53125}},{"column":4689,"row":871,"value":11,"centerPoint":{"x":113.09375,"y":35.53125}},{"column":4690,"row":871,"value":1,"centerPoint":{"x":113.15625,"y":35.53125}},{"column":4691,"row":871,"value":1,"centerPoint":{"x":113.21875,"y":35.53125}},{"column":4692,"row":871,"value":1,"centerPoint":{"x":113.28125,"y":35.53125}},{"column":4693,"row":871,"value":1,"centerPoint":{"x":113.34375,"y":35.53125}}],[{"column":4678,"row":872,"value":10,"centerPoint":{"x":112.40625,"y":35.46875}},{"column":4679,"row":872,"value":11,"centerP
oint":{"x":112.46875,"y":35.46875}},{"column":4680,"row":872,"value":11,"centerPoint":{"x":112.53125,"y":35.46875}},{"column":4681,"row":872,"value":11,"centerPoint":{"x":112.59375,"y":35.46875}},{"column":4682,"row":872,"value":11,"centerPoint":{"x":112.65625,"y":35.46875}},{"column":4683,"row":872,"value":10,"centerPoint":{"x":112.71875,"y":35.46875}},{"column":4684,"row":872,"value":10,"centerPoint":{"x":112.78125,"y":35.46875}},{"column":4685,"row":872,"value":10,"centerPoint":{"x":112.84375,"y":35.46875}},{"column":4686,"row":872,"value":11,"centerPoint":{"x":112.90625,"y":35.46875}},{"column":4687,"row":872,"value":11,"centerPoint":{"x":112.96875,"y":35.46875}},{"column":4688,"row":872,"value":11,"centerPoint":{"x":113.03125,"y":35.46875}},{"column":4689,"row":872,"value":1,"centerPoint":{"x":113.09375,"y":35.46875}},{"column":4690,"row":872,"value":1,"centerPoint":{"x":113.15625,"y":35.46875}},{"column":4691,"row":872,"value":11,"centerPoint":{"x":113.21875,"y":35.46875}},{"column":4692,"row":872,"value":11,"centerPoint":{"x":113.28125,"y":35.46875}},{"column":4693,"row":872,"value":11,"centerPoint":{"x":113.34375,"y":35.46875}}],[{"column":4678,"row":873,"value":11,"centerPoint":{"x":112.40625,"y":35.40625}},{"column":4679,"row":873,"value":11,"centerPoint":{"x":112.46875,"y":35.40625}},{"column":4680,"row":873,"value":11,"centerPoint":{"x":112.53125,"y":35.40625}},{"column":4681,"row":873,"value":11,"centerPoint":{"x":112.59375,"y":35.40625}},{"column":4682,"row":873,"value":10,"centerPoint":{"x":112.65625,"y":35.40625}},{"column":4683,"row":873,"value":10,"centerPoint":{"x":112.71875,"y":35.40625}},{"column":4684,"row":873,"value":10,"centerPoint":{"x":112.78125,"y":35.40625}},{"column":4685,"row":873,"value":11,"centerPoint":{"x":112.84375,"y":35.40625}},{"column":4686,"row":873,"value":11,"centerPoint":{"x":112.90625,"y":35.40625}},{"column":4687,"row":873,"value":10,"centerPoint":{"x":112.96875,"y":35.40625}},{"column":4688,"row":873,"value":10,"centerP
oint":{"x":113.03125,"y":35.40625}},{"column":4689,"row":873,"value":10,"centerPoint":{"x":113.09375,"y":35.40625}},{"column":4690,"row":873,"value":11,"centerPoint":{"x":113.15625,"y":35.40625}},{"column":4691,"row":873,"value":11,"centerPoint":{"x":113.21875,"y":35.40625}},{"column":4692,"row":873,"value":10,"centerPoint":{"x":113.28125,"y":35.40625}},{"column":4693,"row":873,"value":10,"centerPoint":{"x":113.34375,"y":35.40625}}],[{"column":4678,"row":874,"value":11,"centerPoint":{"x":112.40625,"y":35.34375}},{"column":4679,"row":874,"value":11,"centerPoint":{"x":112.46875,"y":35.34375}},{"column":4680,"row":874,"value":11,"centerPoint":{"x":112.53125,"y":35.34375}},{"column":4681,"row":874,"value":11,"centerPoint":{"x":112.59375,"y":35.34375}},{"column":4682,"row":874,"value":11,"centerPoint":{"x":112.65625,"y":35.34375}},{"column":4683,"row":874,"value":11,"centerPoint":{"x":112.71875,"y":35.34375}},{"column":4684,"row":874,"value":11,"centerPoint":{"x":112.78125,"y":35.34375}},{"column":4685,"row":874,"value":11,"centerPoint":{"x":112.84375,"y":35.34375}},{"column":4686,"row":874,"value":11,"centerPoint":{"x":112.90625,"y":35.34375}},{"column":4687,"row":874,"value":11,"centerPoint":{"x":112.96875,"y":35.34375}},{"column":4688,"row":874,"value":11,"centerPoint":{"x":113.03125,"y":35.34375}},{"column":4689,"row":874,"value":11,"centerPoint":{"x":113.09375,"y":35.34375}},{"column":4690,"row":874,"value":11,"centerPoint":{"x":113.15625,"y":35.34375}},{"column":4691,"row":874,"value":10,"centerPoint":{"x":113.21875,"y":35.34375}},{"column":4692,"row":874,"value":10,"centerPoint":{"x":113.28125,"y":35.34375}},{"column":4693,"row":874,"value":11,"centerPoint":{"x":113.34375,"y":35.34375}}],[{"column":4678,"row":875,"value":1,"centerPoint":{"x":112.40625,"y":35.28125}},{"column":4679,"row":875,"value":11,"centerPoint":{"x":112.46875,"y":35.28125}},{"column":4680,"row":875,"value":11,"centerPoint":{"x":112.53125,"y":35.28125}},{"column":4681,"row":875,"value":11,"cent
erPoint":{"x":112.59375,"y":35.28125}},{"column":4682,"row":875,"value":11,"centerPoint":{"x":112.65625,"y":35.28125}},{"column":4683,"row":875,"value":11,"centerPoint":{"x":112.71875,"y":35.28125}},{"column":4684,"row":875,"value":11,"centerPoint":{"x":112.78125,"y":35.28125}},{"column":4685,"row":875,"value":11,"centerPoint":{"x":112.84375,"y":35.28125}},{"column":4686,"row":875,"value":11,"centerPoint":{"x":112.90625,"y":35.28125}},{"column":4687,"row":875,"value":11,"centerPoint":{"x":112.96875,"y":35.28125}},{"column":4688,"row":875,"value":11,"centerPoint":{"x":113.03125,"y":35.28125}},{"column":4689,"row":875,"value":11,"centerPoint":{"x":113.09375,"y":35.28125}},{"column":4690,"row":875,"value":11,"centerPoint":{"x":113.15625,"y":35.28125}},{"column":4691,"row":875,"value":11,"centerPoint":{"x":113.21875,"y":35.28125}},{"column":4692,"row":875,"value":11,"centerPoint":{"x":113.28125,"y":35.28125}},{"column":4693,"row":875,"value":11,"centerPoint":{"x":113.34375,"y":35.28125}}],[{"column":4678,"row":876,"value":11,"centerPoint":{"x":112.40625,"y":35.21875}},{"column":4679,"row":876,"value":11,"centerPoint":{"x":112.46875,"y":35.21875}},{"column":4680,"row":876,"value":11,"centerPoint":{"x":112.53125,"y":35.21875}},{"column":4681,"row":876,"value":11,"centerPoint":{"x":112.59375,"y":35.21875}},{"column":4682,"row":876,"value":11,"centerPoint":{"x":112.65625,"y":35.21875}},{"column":4683,"row":876,"value":11,"centerPoint":{"x":112.71875,"y":35.21875}},{"column":4684,"row":876,"value":11,"centerPoint":{"x":112.78125,"y":35.21875}},{"column":4685,"row":876,"value":11,"centerPoint":{"x":112.84375,"y":35.21875}},{"column":4686,"row":876,"value":11,"centerPoint":{"x":112.90625,"y":35.21875}},{"column":4687,"row":876,"value":11,"centerPoint":{"x":112.96875,"y":35.21875}},{"column":4688,"row":876,"value":11,"centerPoint":{"x":113.03125,"y":35.21875}},{"column":4689,"row":876,"value":11,"centerPoint":{"x":113.09375,"y":35.21875}},{"column":4690,"row":876,"value":11,"ce
nterPoint":{"x":113.15625,"y":35.21875}},{"column":4691,"row":876,"value":11,"centerPoint":{"x":113.21875,"y":35.21875}},{"column":4692,"row":876,"value":11,"centerPoint":{"x":113.28125,"y":35.21875}},{"column":4693,"row":876,"value":11,"centerPoint":{"x":113.34375,"y":35.21875}}],[{"column":4678,"row":877,"value":11,"centerPoint":{"x":112.40625,"y":35.15625}},{"column":4679,"row":877,"value":11,"centerPoint":{"x":112.46875,"y":35.15625}},{"column":4680,"row":877,"value":11,"centerPoint":{"x":112.53125,"y":35.15625}},{"column":4681,"row":877,"value":11,"centerPoint":{"x":112.59375,"y":35.15625}},{"column":4682,"row":877,"value":11,"centerPoint":{"x":112.65625,"y":35.15625}},{"column":4683,"row":877,"value":11,"centerPoint":{"x":112.71875,"y":35.15625}},{"column":4684,"row":877,"value":11,"centerPoint":{"x":112.78125,"y":35.15625}},{"column":4685,"row":877,"value":11,"centerPoint":{"x":112.84375,"y":35.15625}},{"column":4686,"row":877,"value":11,"centerPoint":{"x":112.90625,"y":35.15625}},{"column":4687,"row":877,"value":11,"centerPoint":{"x":112.96875,"y":35.15625}},{"column":4688,"row":877,"value":11,"centerPoint":{"x":113.03125,"y":35.15625}},{"column":4689,"row":877,"value":11,"centerPoint":{"x":113.09375,"y":35.15625}},{"column":4690,"row":877,"value":11,"centerPoint":{"x":113.15625,"y":35.15625}},{"column":4691,"row":877,"value":11,"centerPoint":{"x":113.21875,"y":35.15625}},{"column":4692,"row":877,"value":11,"centerPoint":{"x":113.28125,"y":35.15625}},{"column":4693,"row":877,"value":11,"centerPoint":{"x":113.34375,"y":35.15625}}],[{"column":4678,"row":878,"value":11,"centerPoint":{"x":112.40625,"y":35.09375}},{"column":4679,"row":878,"value":11,"centerPoint":{"x":112.46875,"y":35.09375}},{"column":4680,"row":878,"value":11,"centerPoint":{"x":112.53125,"y":35.09375}},{"column":4681,"row":878,"value":11,"centerPoint":{"x":112.59375,"y":35.09375}},{"column":4682,"row":878,"value":11,"centerPoint":{"x":112.65625,"y":35.09375}},{"column":4683,"row":878,"value":11
,"centerPoint":{"x":112.71875,"y":35.09375}},{"column":4684,"row":878,"value":11,"centerPoint":{"x":112.78125,"y":35.09375}},{"column":4685,"row":878,"value":11,"centerPoint":{"x":112.84375,"y":35.09375}},{"column":4686,"row":878,"value":11,"centerPoint":{"x":112.90625,"y":35.09375}},{"column":4687,"row":878,"value":11,"centerPoint":{"x":112.96875,"y":35.09375}},{"column":4688,"row":878,"value":11,"centerPoint":{"x":113.03125,"y":35.09375}},{"column":4689,"row":878,"value":11,"centerPoint":{"x":113.09375,"y":35.09375}},{"column":4690,"row":878,"value":11,"centerPoint":{"x":113.15625,"y":35.09375}},{"column":4691,"row":878,"value":11,"centerPoint":{"x":113.21875,"y":35.09375}},{"column":4692,"row":878,"value":11,"centerPoint":{"x":113.28125,"y":35.09375}},{"column":4693,"row":878,"value":11,"centerPoint":{"x":113.34375,"y":35.09375}}],[{"column":4678,"row":879,"value":11,"centerPoint":{"x":112.40625,"y":35.03125}},{"column":4679,"row":879,"value":11,"centerPoint":{"x":112.46875,"y":35.03125}},{"column":4680,"row":879,"value":11,"centerPoint":{"x":112.53125,"y":35.03125}},{"column":4681,"row":879,"value":11,"centerPoint":{"x":112.59375,"y":35.03125}},{"column":4682,"row":879,"value":11,"centerPoint":{"x":112.65625,"y":35.03125}},{"column":4683,"row":879,"value":11,"centerPoint":{"x":112.71875,"y":35.03125}},{"column":4684,"row":879,"value":11,"centerPoint":{"x":112.78125,"y":35.03125}},{"column":4685,"row":879,"value":11,"centerPoint":{"x":112.84375,"y":35.03125}},{"column":4686,"row":879,"value":11,"centerPoint":{"x":112.90625,"y":35.03125}},{"column":4687,"row":879,"value":11,"centerPoint":{"x":112.96875,"y":35.03125}},{"column":4688,"row":879,"value":11,"centerPoint":{"x":113.03125,"y":35.03125}},{"column":4689,"row":879,"value":11,"centerPoint":{"x":113.09375,"y":35.03125}},{"column":4690,"row":879,"value":11,"centerPoint":{"x":113.15625,"y":35.03125}},{"column":4691,"row":879,"value":11,"centerPoint":{"x":113.21875,"y":35.03125}},{"column":4692,"row":879,"value":
11,"centerPoint":{"x":113.28125,"y":35.03125}},{"column":4693,"row":879,"value":11,"centerPoint":{"x":113.34375,"y":35.03125}}],[{"column":4678,"row":880,"value":10,"centerPoint":{"x":112.40625,"y":34.96875}},{"column":4679,"row":880,"value":10,"centerPoint":{"x":112.46875,"y":34.96875}},{"column":4680,"row":880,"value":11,"centerPoint":{"x":112.53125,"y":34.96875}},{"column":4681,"row":880,"value":11,"centerPoint":{"x":112.59375,"y":34.96875}},{"column":4682,"row":880,"value":11,"centerPoint":{"x":112.65625,"y":34.96875}},{"column":4683,"row":880,"value":11,"centerPoint":{"x":112.71875,"y":34.96875}},{"column":4684,"row":880,"value":11,"centerPoint":{"x":112.78125,"y":34.96875}},{"column":4685,"row":880,"value":11,"centerPoint":{"x":112.84375,"y":34.96875}},{"column":4686,"row":880,"value":11,"centerPoint":{"x":112.90625,"y":34.96875}},{"column":4687,"row":880,"value":11,"centerPoint":{"x":112.96875,"y":34.96875}},{"column":4688,"row":880,"value":11,"centerPoint":{"x":113.03125,"y":34.96875}},{"column":4689,"row":880,"value":11,"centerPoint":{"x":113.09375,"y":34.96875}},{"column":4690,"row":880,"value":11,"centerPoint":{"x":113.15625,"y":34.96875}},{"column":4691,"row":880,"value":11,"centerPoint":{"x":113.21875,"y":34.96875}},{"column":4692,"row":880,"value":10,"centerPoint":{"x":113.28125,"y":34.96875}},{"column":4693,"row":880,"value":10,"centerPoint":{"x":113.34375,"y":34.96875}}],[{"column":4678,"row":881,"value":11,"centerPoint":{"x":112.40625,"y":34.90625}},{"column":4679,"row":881,"value":11,"centerPoint":{"x":112.46875,"y":34.90625}},{"column":4680,"row":881,"value":11,"centerPoint":{"x":112.53125,"y":34.90625}},{"column":4681,"row":881,"value":11,"centerPoint":{"x":112.59375,"y":34.90625}},{"column":4682,"row":881,"value":11,"centerPoint":{"x":112.65625,"y":34.90625}},{"column":4683,"row":881,"value":11,"centerPoint":{"x":112.71875,"y":34.90625}},{"column":4684,"row":881,"value":11,"centerPoint":{"x":112.78125,"y":34.90625}},{"column":4685,"row":881,"val
ue":11,"centerPoint":{"x":112.84375,"y":34.90625}},{"column":4686,"row":881,"value":11,"centerPoint":{"x":112.90625,"y":34.90625}},{"column":4687,"row":881,"value":11,"centerPoint":{"x":112.96875,"y":34.90625}},{"column":4688,"row":881,"value":11,"centerPoint":{"x":113.03125,"y":34.90625}},{"column":4689,"row":881,"value":11,"centerPoint":{"x":113.09375,"y":34.90625}},{"column":4690,"row":881,"value":11,"centerPoint":{"x":113.15625,"y":34.90625}},{"column":4691,"row":881,"value":11,"centerPoint":{"x":113.21875,"y":34.90625}},{"column":4692,"row":881,"value":11,"centerPoint":{"x":113.28125,"y":34.90625}},{"column":4693,"row":881,"value":11,"centerPoint":{"x":113.34375,"y":34.90625}}],[{"column":4678,"row":882,"value":10,"centerPoint":{"x":112.40625,"y":34.84375}},{"column":4679,"row":882,"value":11,"centerPoint":{"x":112.46875,"y":34.84375}},{"column":4680,"row":882,"value":11,"centerPoint":{"x":112.53125,"y":34.84375}},{"column":4681,"row":882,"value":11,"centerPoint":{"x":112.59375,"y":34.84375}},{"column":4682,"row":882,"value":11,"centerPoint":{"x":112.65625,"y":34.84375}},{"column":4683,"row":882,"value":11,"centerPoint":{"x":112.71875,"y":34.84375}},{"column":4684,"row":882,"value":11,"centerPoint":{"x":112.78125,"y":34.84375}},{"column":4685,"row":882,"value":11,"centerPoint":{"x":112.84375,"y":34.84375}},{"column":4686,"row":882,"value":11,"centerPoint":{"x":112.90625,"y":34.84375}},{"column":4687,"row":882,"value":11,"centerPoint":{"x":112.96875,"y":34.84375}},{"column":4688,"row":882,"value":11,"centerPoint":{"x":113.03125,"y":34.84375}},{"column":4689,"row":882,"value":10,"centerPoint":{"x":113.09375,"y":34.84375}},{"column":4690,"row":882,"value":11,"centerPoint":{"x":113.15625,"y":34.84375}},{"column":4691,"row":882,"value":11,"centerPoint":{"x":113.21875,"y":34.84375}},{"column":4692,"row":882,"value":10,"centerPoint":{"x":113.28125,"y":34.84375}},{"column":4693,"row":882,"value":11,"centerPoint":{"x":113.34375,"y":34.84375}}],[{"column":4678,"row":883,
"value":11,"centerPoint":{"x":112.40625,"y":34.78125}},{"column":4679,"row":883,"value":11,"centerPoint":{"x":112.46875,"y":34.78125}},{"column":4680,"row":883,"value":11,"centerPoint":{"x":112.53125,"y":34.78125}},{"column":4681,"row":883,"value":11,"centerPoint":{"x":112.59375,"y":34.78125}},{"column":4682,"row":883,"value":11,"centerPoint":{"x":112.65625,"y":34.78125}},{"column":4683,"row":883,"value":11,"centerPoint":{"x":112.71875,"y":34.78125}},{"column":4684,"row":883,"value":11,"centerPoint":{"x":112.78125,"y":34.78125}},{"column":4685,"row":883,"value":11,"centerPoint":{"x":112.84375,"y":34.78125}},{"column":4686,"row":883,"value":11,"centerPoint":{"x":112.90625,"y":34.78125}},{"column":4687,"row":883,"value":11,"centerPoint":{"x":112.96875,"y":34.78125}},{"column":4688,"row":883,"value":11,"centerPoint":{"x":113.03125,"y":34.78125}},{"column":4689,"row":883,"value":10,"centerPoint":{"x":113.09375,"y":34.78125}},{"column":4690,"row":883,"value":10,"centerPoint":{"x":113.15625,"y":34.78125}},{"column":4691,"row":883,"value":11,"centerPoint":{"x":113.21875,"y":34.78125}},{"column":4692,"row":883,"value":10,"centerPoint":{"x":113.28125,"y":34.78125}},{"column":4693,"row":883,"value":11,"centerPoint":{"x":113.34375,"y":34.78125}}],[{"column":4678,"row":884,"value":11,"centerPoint":{"x":112.40625,"y":34.71875}},{"column":4679,"row":884,"value":11,"centerPoint":{"x":112.46875,"y":34.71875}},{"column":4680,"row":884,"value":11,"centerPoint":{"x":112.53125,"y":34.71875}},{"column":4681,"row":884,"value":11,"centerPoint":{"x":112.59375,"y":34.71875}},{"column":4682,"row":884,"value":11,"centerPoint":{"x":112.65625,"y":34.71875}},{"column":4683,"row":884,"value":11,"centerPoint":{"x":112.71875,"y":34.71875}},{"column":4684,"row":884,"value":11,"centerPoint":{"x":112.78125,"y":34.71875}},{"column":4685,"row":884,"value":11,"centerPoint":{"x":112.84375,"y":34.71875}},{"column":4686,"row":884,"value":11,"centerPoint":{"x":112.90625,"y":34.71875}},{"column":4687,"row":88
4,"value":11,"centerPoint":{"x":112.96875,"y":34.71875}},{"column":4688,"row":884,"value":11,"centerPoint":{"x":113.03125,"y":34.71875}},{"column":4689,"row":884,"value":11,"centerPoint":{"x":113.09375,"y":34.71875}},{"column":4690,"row":884,"value":10,"centerPoint":{"x":113.15625,"y":34.71875}},{"column":4691,"row":884,"value":11,"centerPoint":{"x":113.21875,"y":34.71875}},{"column":4692,"row":884,"value":11,"centerPoint":{"x":113.28125,"y":34.71875}},{"column":4693,"row":884,"value":11,"centerPoint":{"x":113.34375,"y":34.71875}}],[]],"rowCount":17,"columnCount":17}'
# Build a query bounding box: leftBottom/rightTop corners spanning
# x 112.351881..113.361881, y 34.663401..35.673401 (presumably lon/lat
# degrees — TODO confirm against the service's CRS).
# NOTE(review): DefaultValuesItem and Point2D are project types declared
# elsewhere in this file; here they are used as plain attribute holders.
bounds = DefaultValuesItem()
bounds.leftBottom = Point2D()
bounds.rightTop = Point2D()
bounds.leftBottom.x = 112.351881
bounds.leftBottom.y = 34.663401
bounds.rightTop.x = 113.361881
bounds.rightTop.y = 35.673401
# Verify DataService.get_gridvalues performs an HTTP GET against the
# World/continent_T gridValues.json endpoint with the bounds above,
# replaying the captured grid-values payload (jsonstr, assigned above)
# through an httpretty 200 mock response.
self.check_api(DataService.get_gridvalues,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/gridValues.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', bounds=bounds)
jsonstr = '{"postResultType":"CreateChild","succeed":true,"customResult":[{"valuesCount":96,"values":[[],[{"column":4603,"row":908,"value":1,"centerPoint":{"x":107.71875,"y":33.21875}},{"column":4604,"row":908,"value":1,"centerPoint":{"x":107.78125,"y":33.21875}},{"column":4605,"row":908,"value":1,"centerPoint":{"x":107.84375,"y":33.21875}},{"column":4606,"row":908,"value":1,"centerPoint":{"x":107.90625,"y":33.21875}},{"column":4607,"row":908,"value":4,"centerPoint":{"x":107.96875,"y":33.21875}},{"column":4608,"row":908,"value":4,"centerPoint":{"x":108.03125,"y":33.21875}},{"column":4609,"row":908,"value":4,"centerPoint":{"x":108.09375,"y":33.21875}},{"column":4610,"row":908,"value":5,"centerPoint":{"x":108.15625,"y":33.21875}},{"column":4611,"row":908,"value":4,"centerPoint":{"x":108.21875,"y":33.21875}},{"column":4612,"row":908,"value":4,"centerPoint":{"x":108.28125,"y":33.21875}},{"column":4613,"row":908,"value":4,"centerPoint":{"x":108.34375,"y":33.21875}},{"column":4614,"row":908,"value":5,"centerPoint":{"x":108.40625,"y":33.21875}},{"column":4615,"row":908,"value":1,"centerPoint":{"x":108.46875,"y":33.21875}},{"column":4616,"row":908,"value":1,"centerPoint":{"x":108.53125,"y":33.21875}},{"column":4617,"row":908,"value":1,"centerPoint":{"x":108.59375,"y":33.21875}},{"column":4618,"row":908,"value":4,"centerPoint":{"x":108.65625,"y":33.21875}}],[{"column":4603,"row":909,"value":1,"centerPoint":{"x":107.71875,"y":33.15625}},{"column":4604,"row":909,"value":1,"centerPoint":{"x":107.78125,"y":33.15625}},{"column":4605,"row":909,"value":1,"centerPoint":{"x":107.84375,"y":33.15625}},{"column":4606,"row":909,"value":1,"centerPoint":{"x":107.90625,"y":33.15625}},{"column":4607,"row":909,"value":1,"centerPoint":{"x":107.96875,"y":33.15625}},{"column":4608,"row":909,"value":1,"centerPoint":{"x":108.03125,"y":33.15625}},{"column":4609,"row":909,"value":1,"centerPoint":{"x":108.09375,"y":33.15625}},{"column":4610,"row":909,"value":1,"centerPoint":{"x":108.15625,"y":33.1562
5}},{"column":4611,"row":909,"value":1,"centerPoint":{"x":108.21875,"y":33.15625}},{"column":4612,"row":909,"value":1,"centerPoint":{"x":108.28125,"y":33.15625}},{"column":4613,"row":909,"value":1,"centerPoint":{"x":108.34375,"y":33.15625}},{"column":4614,"row":909,"value":1,"centerPoint":{"x":108.40625,"y":33.15625}},{"column":4615,"row":909,"value":1,"centerPoint":{"x":108.46875,"y":33.15625}},{"column":4616,"row":909,"value":1,"centerPoint":{"x":108.53125,"y":33.15625}},{"column":4617,"row":909,"value":1,"centerPoint":{"x":108.59375,"y":33.15625}},{"column":4618,"row":909,"value":1,"centerPoint":{"x":108.65625,"y":33.15625}}],[{"column":4603,"row":910,"value":1,"centerPoint":{"x":107.71875,"y":33.09375}},{"column":4604,"row":910,"value":1,"centerPoint":{"x":107.78125,"y":33.09375}},{"column":4605,"row":910,"value":1,"centerPoint":{"x":107.84375,"y":33.09375}},{"column":4606,"row":910,"value":1,"centerPoint":{"x":107.90625,"y":33.09375}},{"column":4607,"row":910,"value":1,"centerPoint":{"x":107.96875,"y":33.09375}},{"column":4608,"row":910,"value":1,"centerPoint":{"x":108.03125,"y":33.09375}},{"column":4609,"row":910,"value":1,"centerPoint":{"x":108.09375,"y":33.09375}},{"column":4610,"row":910,"value":1,"centerPoint":{"x":108.15625,"y":33.09375}},{"column":4611,"row":910,"value":1,"centerPoint":{"x":108.21875,"y":33.09375}},{"column":4612,"row":910,"value":1,"centerPoint":{"x":108.28125,"y":33.09375}},{"column":4613,"row":910,"value":1,"centerPoint":{"x":108.34375,"y":33.09375}},{"column":4614,"row":910,"value":1,"centerPoint":{"x":108.40625,"y":33.09375}},{"column":4615,"row":910,"value":11,"centerPoint":{"x":108.46875,"y":33.09375}},{"column":4616,"row":910,"value":11,"centerPoint":{"x":108.53125,"y":33.09375}},{"column":4617,"row":910,"value":11,"centerPoint":{"x":108.59375,"y":33.09375}},{"column":4618,"row":910,"value":1,"centerPoint":{"x":108.65625,"y":33.09375}}],[{"column":4603,"row":911,"value":11,"centerPoint":{"x":107.71875,"y":33.03125}},{"column":460
4,"row":911,"value":11,"centerPoint":{"x":107.78125,"y":33.03125}},{"column":4605,"row":911,"value":1,"centerPoint":{"x":107.84375,"y":33.03125}},{"column":4606,"row":911,"value":1,"centerPoint":{"x":107.90625,"y":33.03125}},{"column":4607,"row":911,"value":1,"centerPoint":{"x":107.96875,"y":33.03125}},{"column":4608,"row":911,"value":1,"centerPoint":{"x":108.03125,"y":33.03125}},{"column":4609,"row":911,"value":1,"centerPoint":{"x":108.09375,"y":33.03125}},{"column":4610,"row":911,"value":1,"centerPoint":{"x":108.15625,"y":33.03125}},{"column":4611,"row":911,"value":1,"centerPoint":{"x":108.21875,"y":33.03125}},{"column":4612,"row":911,"value":1,"centerPoint":{"x":108.28125,"y":33.03125}},{"column":4613,"row":911,"value":1,"centerPoint":{"x":108.34375,"y":33.03125}},{"column":4614,"row":911,"value":1,"centerPoint":{"x":108.40625,"y":33.03125}},{"column":4615,"row":911,"value":1,"centerPoint":{"x":108.46875,"y":33.03125}},{"column":4616,"row":911,"value":6,"centerPoint":{"x":108.53125,"y":33.03125}},{"column":4617,"row":911,"value":11,"centerPoint":{"x":108.59375,"y":33.03125}},{"column":4618,"row":911,"value":1,"centerPoint":{"x":108.65625,"y":33.03125}}],[{"column":4603,"row":912,"value":11,"centerPoint":{"x":107.71875,"y":32.96875}},{"column":4604,"row":912,"value":1,"centerPoint":{"x":107.78125,"y":32.96875}},{"column":4605,"row":912,"value":1,"centerPoint":{"x":107.84375,"y":32.96875}},{"column":4606,"row":912,"value":1,"centerPoint":{"x":107.90625,"y":32.96875}},{"column":4607,"row":912,"value":1,"centerPoint":{"x":107.96875,"y":32.96875}},{"column":4608,"row":912,"value":1,"centerPoint":{"x":108.03125,"y":32.96875}},{"column":4609,"row":912,"value":1,"centerPoint":{"x":108.09375,"y":32.96875}},{"column":4610,"row":912,"value":1,"centerPoint":{"x":108.15625,"y":32.96875}},{"column":4611,"row":912,"value":1,"centerPoint":{"x":108.21875,"y":32.96875}},{"column":4612,"row":912,"value":1,"centerPoint":{"x":108.28125,"y":32.96875}},{"column":4613,"row":912,"value":
1,"centerPoint":{"x":108.34375,"y":32.96875}},{"column":4614,"row":912,"value":1,"centerPoint":{"x":108.40625,"y":32.96875}},{"column":4615,"row":912,"value":11,"centerPoint":{"x":108.46875,"y":32.96875}},{"column":4616,"row":912,"value":6,"centerPoint":{"x":108.53125,"y":32.96875}},{"column":4617,"row":912,"value":6,"centerPoint":{"x":108.59375,"y":32.96875}},{"column":4618,"row":912,"value":6,"centerPoint":{"x":108.65625,"y":32.96875}}],[{"column":4603,"row":913,"value":1,"centerPoint":{"x":107.71875,"y":32.90625}},{"column":4604,"row":913,"value":4,"centerPoint":{"x":107.78125,"y":32.90625}},{"column":4605,"row":913,"value":4,"centerPoint":{"x":107.84375,"y":32.90625}},{"column":4606,"row":913,"value":4,"centerPoint":{"x":107.90625,"y":32.90625}},{"column":4607,"row":913,"value":1,"centerPoint":{"x":107.96875,"y":32.90625}},{"column":4608,"row":913,"value":1,"centerPoint":{"x":108.03125,"y":32.90625}},{"column":4609,"row":913,"value":1,"centerPoint":{"x":108.09375,"y":32.90625}},{"column":4610,"row":913,"value":1,"centerPoint":{"x":108.15625,"y":32.90625}},{"column":4611,"row":913,"value":1,"centerPoint":{"x":108.21875,"y":32.90625}},{"column":4612,"row":913,"value":1,"centerPoint":{"x":108.28125,"y":32.90625}},{"column":4613,"row":913,"value":1,"centerPoint":{"x":108.34375,"y":32.90625}},{"column":4614,"row":913,"value":1,"centerPoint":{"x":108.40625,"y":32.90625}},{"column":4615,"row":913,"value":1,"centerPoint":{"x":108.46875,"y":32.90625}},{"column":4616,"row":913,"value":1,"centerPoint":{"x":108.53125,"y":32.90625}},{"column":4617,"row":913,"value":1,"centerPoint":{"x":108.59375,"y":32.90625}},{"column":4618,"row":913,"value":1,"centerPoint":{"x":108.65625,"y":32.90625}}]],"rowCount":7,"columnCount":17},{"valuesCount":256,"values":[[{"column":4678,"row":869,"value":10,"centerPoint":{"x":112.40625,"y":35.65625}},{"column":4679,"row":869,"value":10,"centerPoint":{"x":112.46875,"y":35.65625}},{"column":4680,"row":869,"value":10,"centerPoint":{"x":112.53125,"y":
35.65625}},{"column":4681,"row":869,"value":10,"centerPoint":{"x":112.59375,"y":35.65625}},{"column":4682,"row":869,"value":10,"centerPoint":{"x":112.65625,"y":35.65625}},{"column":4683,"row":869,"value":10,"centerPoint":{"x":112.71875,"y":35.65625}},{"column":4684,"row":869,"value":11,"centerPoint":{"x":112.78125,"y":35.65625}},{"column":4685,"row":869,"value":11,"centerPoint":{"x":112.84375,"y":35.65625}},{"column":4686,"row":869,"value":11,"centerPoint":{"x":112.90625,"y":35.65625}},{"column":4687,"row":869,"value":11,"centerPoint":{"x":112.96875,"y":35.65625}},{"column":4688,"row":869,"value":11,"centerPoint":{"x":113.03125,"y":35.65625}},{"column":4689,"row":869,"value":11,"centerPoint":{"x":113.09375,"y":35.65625}},{"column":4690,"row":869,"value":11,"centerPoint":{"x":113.15625,"y":35.65625}},{"column":4691,"row":869,"value":1,"centerPoint":{"x":113.21875,"y":35.65625}},{"column":4692,"row":869,"value":1,"centerPoint":{"x":113.28125,"y":35.65625}},{"column":4693,"row":869,"value":1,"centerPoint":{"x":113.34375,"y":35.65625}}],[{"column":4678,"row":870,"value":10,"centerPoint":{"x":112.40625,"y":35.59375}},{"column":4679,"row":870,"value":10,"centerPoint":{"x":112.46875,"y":35.59375}},{"column":4680,"row":870,"value":10,"centerPoint":{"x":112.53125,"y":35.59375}},{"column":4681,"row":870,"value":10,"centerPoint":{"x":112.59375,"y":35.59375}},{"column":4682,"row":870,"value":10,"centerPoint":{"x":112.65625,"y":35.59375}},{"column":4683,"row":870,"value":11,"centerPoint":{"x":112.71875,"y":35.59375}},{"column":4684,"row":870,"value":11,"centerPoint":{"x":112.78125,"y":35.59375}},{"column":4685,"row":870,"value":11,"centerPoint":{"x":112.84375,"y":35.59375}},{"column":4686,"row":870,"value":11,"centerPoint":{"x":112.90625,"y":35.59375}},{"column":4687,"row":870,"value":11,"centerPoint":{"x":112.96875,"y":35.59375}},{"column":4688,"row":870,"value":11,"centerPoint":{"x":113.03125,"y":35.59375}},{"column":4689,"row":870,"value":11,"centerPoint":{"x":113.09375,"y":3
5.59375}},{"column":4690,"row":870,"value":1,"centerPoint":{"x":113.15625,"y":35.59375}},{"column":4691,"row":870,"value":1,"centerPoint":{"x":113.21875,"y":35.59375}},{"column":4692,"row":870,"value":1,"centerPoint":{"x":113.28125,"y":35.59375}},{"column":4693,"row":870,"value":1,"centerPoint":{"x":113.34375,"y":35.59375}}],[{"column":4678,"row":871,"value":10,"centerPoint":{"x":112.40625,"y":35.53125}},{"column":4679,"row":871,"value":10,"centerPoint":{"x":112.46875,"y":35.53125}},{"column":4680,"row":871,"value":10,"centerPoint":{"x":112.53125,"y":35.53125}},{"column":4681,"row":871,"value":10,"centerPoint":{"x":112.59375,"y":35.53125}},{"column":4682,"row":871,"value":11,"centerPoint":{"x":112.65625,"y":35.53125}},{"column":4683,"row":871,"value":11,"centerPoint":{"x":112.71875,"y":35.53125}},{"column":4684,"row":871,"value":11,"centerPoint":{"x":112.78125,"y":35.53125}},{"column":4685,"row":871,"value":10,"centerPoint":{"x":112.84375,"y":35.53125}},{"column":4686,"row":871,"value":11,"centerPoint":{"x":112.90625,"y":35.53125}},{"column":4687,"row":871,"value":11,"centerPoint":{"x":112.96875,"y":35.53125}},{"column":4688,"row":871,"value":11,"centerPoint":{"x":113.03125,"y":35.53125}},{"column":4689,"row":871,"value":11,"centerPoint":{"x":113.09375,"y":35.53125}},{"column":4690,"row":871,"value":1,"centerPoint":{"x":113.15625,"y":35.53125}},{"column":4691,"row":871,"value":1,"centerPoint":{"x":113.21875,"y":35.53125}},{"column":4692,"row":871,"value":1,"centerPoint":{"x":113.28125,"y":35.53125}},{"column":4693,"row":871,"value":1,"centerPoint":{"x":113.34375,"y":35.53125}}],[{"column":4678,"row":872,"value":10,"centerPoint":{"x":112.40625,"y":35.46875}},{"column":4679,"row":872,"value":11,"centerPoint":{"x":112.46875,"y":35.46875}},{"column":4680,"row":872,"value":11,"centerPoint":{"x":112.53125,"y":35.46875}},{"column":4681,"row":872,"value":11,"centerPoint":{"x":112.59375,"y":35.46875}},{"column":4682,"row":872,"value":11,"centerPoint":{"x":112.65625,"y":35.46
875}},{"column":4683,"row":872,"value":10,"centerPoint":{"x":112.71875,"y":35.46875}},{"column":4684,"row":872,"value":10,"centerPoint":{"x":112.78125,"y":35.46875}},{"column":4685,"row":872,"value":10,"centerPoint":{"x":112.84375,"y":35.46875}},{"column":4686,"row":872,"value":11,"centerPoint":{"x":112.90625,"y":35.46875}},{"column":4687,"row":872,"value":11,"centerPoint":{"x":112.96875,"y":35.46875}},{"column":4688,"row":872,"value":11,"centerPoint":{"x":113.03125,"y":35.46875}},{"column":4689,"row":872,"value":1,"centerPoint":{"x":113.09375,"y":35.46875}},{"column":4690,"row":872,"value":1,"centerPoint":{"x":113.15625,"y":35.46875}},{"column":4691,"row":872,"value":11,"centerPoint":{"x":113.21875,"y":35.46875}},{"column":4692,"row":872,"value":11,"centerPoint":{"x":113.28125,"y":35.46875}},{"column":4693,"row":872,"value":11,"centerPoint":{"x":113.34375,"y":35.46875}}],[{"column":4678,"row":873,"value":11,"centerPoint":{"x":112.40625,"y":35.40625}},{"column":4679,"row":873,"value":11,"centerPoint":{"x":112.46875,"y":35.40625}},{"column":4680,"row":873,"value":11,"centerPoint":{"x":112.53125,"y":35.40625}},{"column":4681,"row":873,"value":11,"centerPoint":{"x":112.59375,"y":35.40625}},{"column":4682,"row":873,"value":10,"centerPoint":{"x":112.65625,"y":35.40625}},{"column":4683,"row":873,"value":10,"centerPoint":{"x":112.71875,"y":35.40625}},{"column":4684,"row":873,"value":10,"centerPoint":{"x":112.78125,"y":35.40625}},{"column":4685,"row":873,"value":11,"centerPoint":{"x":112.84375,"y":35.40625}},{"column":4686,"row":873,"value":11,"centerPoint":{"x":112.90625,"y":35.40625}},{"column":4687,"row":873,"value":10,"centerPoint":{"x":112.96875,"y":35.40625}},{"column":4688,"row":873,"value":10,"centerPoint":{"x":113.03125,"y":35.40625}},{"column":4689,"row":873,"value":10,"centerPoint":{"x":113.09375,"y":35.40625}},{"column":4690,"row":873,"value":11,"centerPoint":{"x":113.15625,"y":35.40625}},{"column":4691,"row":873,"value":11,"centerPoint":{"x":113.21875,"y":35.40
625}},{"column":4692,"row":873,"value":10,"centerPoint":{"x":113.28125,"y":35.40625}},{"column":4693,"row":873,"value":10,"centerPoint":{"x":113.34375,"y":35.40625}}],[{"column":4678,"row":874,"value":11,"centerPoint":{"x":112.40625,"y":35.34375}},{"column":4679,"row":874,"value":11,"centerPoint":{"x":112.46875,"y":35.34375}},{"column":4680,"row":874,"value":11,"centerPoint":{"x":112.53125,"y":35.34375}},{"column":4681,"row":874,"value":11,"centerPoint":{"x":112.59375,"y":35.34375}},{"column":4682,"row":874,"value":11,"centerPoint":{"x":112.65625,"y":35.34375}},{"column":4683,"row":874,"value":11,"centerPoint":{"x":112.71875,"y":35.34375}},{"column":4684,"row":874,"value":11,"centerPoint":{"x":112.78125,"y":35.34375}},{"column":4685,"row":874,"value":11,"centerPoint":{"x":112.84375,"y":35.34375}},{"column":4686,"row":874,"value":11,"centerPoint":{"x":112.90625,"y":35.34375}},{"column":4687,"row":874,"value":11,"centerPoint":{"x":112.96875,"y":35.34375}},{"column":4688,"row":874,"value":11,"centerPoint":{"x":113.03125,"y":35.34375}},{"column":4689,"row":874,"value":11,"centerPoint":{"x":113.09375,"y":35.34375}},{"column":4690,"row":874,"value":11,"centerPoint":{"x":113.15625,"y":35.34375}},{"column":4691,"row":874,"value":10,"centerPoint":{"x":113.21875,"y":35.34375}},{"column":4692,"row":874,"value":10,"centerPoint":{"x":113.28125,"y":35.34375}},{"column":4693,"row":874,"value":11,"centerPoint":{"x":113.34375,"y":35.34375}}],[{"column":4678,"row":875,"value":1,"centerPoint":{"x":112.40625,"y":35.28125}},{"column":4679,"row":875,"value":11,"centerPoint":{"x":112.46875,"y":35.28125}},{"column":4680,"row":875,"value":11,"centerPoint":{"x":112.53125,"y":35.28125}},{"column":4681,"row":875,"value":11,"centerPoint":{"x":112.59375,"y":35.28125}},{"column":4682,"row":875,"value":11,"centerPoint":{"x":112.65625,"y":35.28125}},{"column":4683,"row":875,"value":11,"centerPoint":{"x":112.71875,"y":35.28125}},{"column":4684,"row":875,"value":11,"centerPoint":{"x":112.78125,"y":35
.28125}},{"column":4685,"row":875,"value":11,"centerPoint":{"x":112.84375,"y":35.28125}},{"column":4686,"row":875,"value":11,"centerPoint":{"x":112.90625,"y":35.28125}},{"column":4687,"row":875,"value":11,"centerPoint":{"x":112.96875,"y":35.28125}},{"column":4688,"row":875,"value":11,"centerPoint":{"x":113.03125,"y":35.28125}},{"column":4689,"row":875,"value":11,"centerPoint":{"x":113.09375,"y":35.28125}},{"column":4690,"row":875,"value":11,"centerPoint":{"x":113.15625,"y":35.28125}},{"column":4691,"row":875,"value":11,"centerPoint":{"x":113.21875,"y":35.28125}},{"column":4692,"row":875,"value":11,"centerPoint":{"x":113.28125,"y":35.28125}},{"column":4693,"row":875,"value":11,"centerPoint":{"x":113.34375,"y":35.28125}}],[{"column":4678,"row":876,"value":11,"centerPoint":{"x":112.40625,"y":35.21875}},{"column":4679,"row":876,"value":11,"centerPoint":{"x":112.46875,"y":35.21875}},{"column":4680,"row":876,"value":11,"centerPoint":{"x":112.53125,"y":35.21875}},{"column":4681,"row":876,"value":11,"centerPoint":{"x":112.59375,"y":35.21875}},{"column":4682,"row":876,"value":11,"centerPoint":{"x":112.65625,"y":35.21875}},{"column":4683,"row":876,"value":11,"centerPoint":{"x":112.71875,"y":35.21875}},{"column":4684,"row":876,"value":11,"centerPoint":{"x":112.78125,"y":35.21875}},{"column":4685,"row":876,"value":11,"centerPoint":{"x":112.84375,"y":35.21875}},{"column":4686,"row":876,"value":11,"centerPoint":{"x":112.90625,"y":35.21875}},{"column":4687,"row":876,"value":11,"centerPoint":{"x":112.96875,"y":35.21875}},{"column":4688,"row":876,"value":11,"centerPoint":{"x":113.03125,"y":35.21875}},{"column":4689,"row":876,"value":11,"centerPoint":{"x":113.09375,"y":35.21875}},{"column":4690,"row":876,"value":11,"centerPoint":{"x":113.15625,"y":35.21875}},{"column":4691,"row":876,"value":11,"centerPoint":{"x":113.21875,"y":35.21875}},{"column":4692,"row":876,"value":11,"centerPoint":{"x":113.28125,"y":35.21875}},{"column":4693,"row":876,"value":11,"centerPoint":{"x":113.34375,"y":
35.21875}}],[{"column":4678,"row":877,"value":11,"centerPoint":{"x":112.40625,"y":35.15625}},{"column":4679,"row":877,"value":11,"centerPoint":{"x":112.46875,"y":35.15625}},{"column":4680,"row":877,"value":11,"centerPoint":{"x":112.53125,"y":35.15625}},{"column":4681,"row":877,"value":11,"centerPoint":{"x":112.59375,"y":35.15625}},{"column":4682,"row":877,"value":11,"centerPoint":{"x":112.65625,"y":35.15625}},{"column":4683,"row":877,"value":11,"centerPoint":{"x":112.71875,"y":35.15625}},{"column":4684,"row":877,"value":11,"centerPoint":{"x":112.78125,"y":35.15625}},{"column":4685,"row":877,"value":11,"centerPoint":{"x":112.84375,"y":35.15625}},{"column":4686,"row":877,"value":11,"centerPoint":{"x":112.90625,"y":35.15625}},{"column":4687,"row":877,"value":11,"centerPoint":{"x":112.96875,"y":35.15625}},{"column":4688,"row":877,"value":11,"centerPoint":{"x":113.03125,"y":35.15625}},{"column":4689,"row":877,"value":11,"centerPoint":{"x":113.09375,"y":35.15625}},{"column":4690,"row":877,"value":11,"centerPoint":{"x":113.15625,"y":35.15625}},{"column":4691,"row":877,"value":11,"centerPoint":{"x":113.21875,"y":35.15625}},{"column":4692,"row":877,"value":11,"centerPoint":{"x":113.28125,"y":35.15625}},{"column":4693,"row":877,"value":11,"centerPoint":{"x":113.34375,"y":35.15625}}],[{"column":4678,"row":878,"value":11,"centerPoint":{"x":112.40625,"y":35.09375}},{"column":4679,"row":878,"value":11,"centerPoint":{"x":112.46875,"y":35.09375}},{"column":4680,"row":878,"value":11,"centerPoint":{"x":112.53125,"y":35.09375}},{"column":4681,"row":878,"value":11,"centerPoint":{"x":112.59375,"y":35.09375}},{"column":4682,"row":878,"value":11,"centerPoint":{"x":112.65625,"y":35.09375}},{"column":4683,"row":878,"value":11,"centerPoint":{"x":112.71875,"y":35.09375}},{"column":4684,"row":878,"value":11,"centerPoint":{"x":112.78125,"y":35.09375}},{"column":4685,"row":878,"value":11,"centerPoint":{"x":112.84375,"y":35.09375}},{"column":4686,"row":878,"value":11,"centerPoint":{"x":112.90625,
"y":35.09375}},{"column":4687,"row":878,"value":11,"centerPoint":{"x":112.96875,"y":35.09375}},{"column":4688,"row":878,"value":11,"centerPoint":{"x":113.03125,"y":35.09375}},{"column":4689,"row":878,"value":11,"centerPoint":{"x":113.09375,"y":35.09375}},{"column":4690,"row":878,"value":11,"centerPoint":{"x":113.15625,"y":35.09375}},{"column":4691,"row":878,"value":11,"centerPoint":{"x":113.21875,"y":35.09375}},{"column":4692,"row":878,"value":11,"centerPoint":{"x":113.28125,"y":35.09375}},{"column":4693,"row":878,"value":11,"centerPoint":{"x":113.34375,"y":35.09375}}],[{"column":4678,"row":879,"value":11,"centerPoint":{"x":112.40625,"y":35.03125}},{"column":4679,"row":879,"value":11,"centerPoint":{"x":112.46875,"y":35.03125}},{"column":4680,"row":879,"value":11,"centerPoint":{"x":112.53125,"y":35.03125}},{"column":4681,"row":879,"value":11,"centerPoint":{"x":112.59375,"y":35.03125}},{"column":4682,"row":879,"value":11,"centerPoint":{"x":112.65625,"y":35.03125}},{"column":4683,"row":879,"value":11,"centerPoint":{"x":112.71875,"y":35.03125}},{"column":4684,"row":879,"value":11,"centerPoint":{"x":112.78125,"y":35.03125}},{"column":4685,"row":879,"value":11,"centerPoint":{"x":112.84375,"y":35.03125}},{"column":4686,"row":879,"value":11,"centerPoint":{"x":112.90625,"y":35.03125}},{"column":4687,"row":879,"value":11,"centerPoint":{"x":112.96875,"y":35.03125}},{"column":4688,"row":879,"value":11,"centerPoint":{"x":113.03125,"y":35.03125}},{"column":4689,"row":879,"value":11,"centerPoint":{"x":113.09375,"y":35.03125}},{"column":4690,"row":879,"value":11,"centerPoint":{"x":113.15625,"y":35.03125}},{"column":4691,"row":879,"value":11,"centerPoint":{"x":113.21875,"y":35.03125}},{"column":4692,"row":879,"value":11,"centerPoint":{"x":113.28125,"y":35.03125}},{"column":4693,"row":879,"value":11,"centerPoint":{"x":113.34375,"y":35.03125}}],[{"column":4678,"row":880,"value":10,"centerPoint":{"x":112.40625,"y":34.96875}},{"column":4679,"row":880,"value":10,"centerPoint":{"x":112.46
875,"y":34.96875}},{"column":4680,"row":880,"value":11,"centerPoint":{"x":112.53125,"y":34.96875}},{"column":4681,"row":880,"value":11,"centerPoint":{"x":112.59375,"y":34.96875}},{"column":4682,"row":880,"value":11,"centerPoint":{"x":112.65625,"y":34.96875}},{"column":4683,"row":880,"value":11,"centerPoint":{"x":112.71875,"y":34.96875}},{"column":4684,"row":880,"value":11,"centerPoint":{"x":112.78125,"y":34.96875}},{"column":4685,"row":880,"value":11,"centerPoint":{"x":112.84375,"y":34.96875}},{"column":4686,"row":880,"value":11,"centerPoint":{"x":112.90625,"y":34.96875}},{"column":4687,"row":880,"value":11,"centerPoint":{"x":112.96875,"y":34.96875}},{"column":4688,"row":880,"value":11,"centerPoint":{"x":113.03125,"y":34.96875}},{"column":4689,"row":880,"value":11,"centerPoint":{"x":113.09375,"y":34.96875}},{"column":4690,"row":880,"value":11,"centerPoint":{"x":113.15625,"y":34.96875}},{"column":4691,"row":880,"value":11,"centerPoint":{"x":113.21875,"y":34.96875}},{"column":4692,"row":880,"value":10,"centerPoint":{"x":113.28125,"y":34.96875}},{"column":4693,"row":880,"value":10,"centerPoint":{"x":113.34375,"y":34.96875}}],[{"column":4678,"row":881,"value":11,"centerPoint":{"x":112.40625,"y":34.90625}},{"column":4679,"row":881,"value":11,"centerPoint":{"x":112.46875,"y":34.90625}},{"column":4680,"row":881,"value":11,"centerPoint":{"x":112.53125,"y":34.90625}},{"column":4681,"row":881,"value":11,"centerPoint":{"x":112.59375,"y":34.90625}},{"column":4682,"row":881,"value":11,"centerPoint":{"x":112.65625,"y":34.90625}},{"column":4683,"row":881,"value":11,"centerPoint":{"x":112.71875,"y":34.90625}},{"column":4684,"row":881,"value":11,"centerPoint":{"x":112.78125,"y":34.90625}},{"column":4685,"row":881,"value":11,"centerPoint":{"x":112.84375,"y":34.90625}},{"column":4686,"row":881,"value":11,"centerPoint":{"x":112.90625,"y":34.90625}},{"column":4687,"row":881,"value":11,"centerPoint":{"x":112.96875,"y":34.90625}},{"column":4688,"row":881,"value":11,"centerPoint":{"x":113.
03125,"y":34.90625}},{"column":4689,"row":881,"value":11,"centerPoint":{"x":113.09375,"y":34.90625}},{"column":4690,"row":881,"value":11,"centerPoint":{"x":113.15625,"y":34.90625}},{"column":4691,"row":881,"value":11,"centerPoint":{"x":113.21875,"y":34.90625}},{"column":4692,"row":881,"value":11,"centerPoint":{"x":113.28125,"y":34.90625}},{"column":4693,"row":881,"value":11,"centerPoint":{"x":113.34375,"y":34.90625}}],[{"column":4678,"row":882,"value":10,"centerPoint":{"x":112.40625,"y":34.84375}},{"column":4679,"row":882,"value":11,"centerPoint":{"x":112.46875,"y":34.84375}},{"column":4680,"row":882,"value":11,"centerPoint":{"x":112.53125,"y":34.84375}},{"column":4681,"row":882,"value":11,"centerPoint":{"x":112.59375,"y":34.84375}},{"column":4682,"row":882,"value":11,"centerPoint":{"x":112.65625,"y":34.84375}},{"column":4683,"row":882,"value":11,"centerPoint":{"x":112.71875,"y":34.84375}},{"column":4684,"row":882,"value":11,"centerPoint":{"x":112.78125,"y":34.84375}},{"column":4685,"row":882,"value":11,"centerPoint":{"x":112.84375,"y":34.84375}},{"column":4686,"row":882,"value":11,"centerPoint":{"x":112.90625,"y":34.84375}},{"column":4687,"row":882,"value":11,"centerPoint":{"x":112.96875,"y":34.84375}},{"column":4688,"row":882,"value":11,"centerPoint":{"x":113.03125,"y":34.84375}},{"column":4689,"row":882,"value":10,"centerPoint":{"x":113.09375,"y":34.84375}},{"column":4690,"row":882,"value":11,"centerPoint":{"x":113.15625,"y":34.84375}},{"column":4691,"row":882,"value":11,"centerPoint":{"x":113.21875,"y":34.84375}},{"column":4692,"row":882,"value":10,"centerPoint":{"x":113.28125,"y":34.84375}},{"column":4693,"row":882,"value":11,"centerPoint":{"x":113.34375,"y":34.84375}}],[{"column":4678,"row":883,"value":11,"centerPoint":{"x":112.40625,"y":34.78125}},{"column":4679,"row":883,"value":11,"centerPoint":{"x":112.46875,"y":34.78125}},{"column":4680,"row":883,"value":11,"centerPoint":{"x":112.53125,"y":34.78125}},{"column":4681,"row":883,"value":11,"centerPoint":{"x":
112.59375,"y":34.78125}},{"column":4682,"row":883,"value":11,"centerPoint":{"x":112.65625,"y":34.78125}},{"column":4683,"row":883,"value":11,"centerPoint":{"x":112.71875,"y":34.78125}},{"column":4684,"row":883,"value":11,"centerPoint":{"x":112.78125,"y":34.78125}},{"column":4685,"row":883,"value":11,"centerPoint":{"x":112.84375,"y":34.78125}},{"column":4686,"row":883,"value":11,"centerPoint":{"x":112.90625,"y":34.78125}},{"column":4687,"row":883,"value":11,"centerPoint":{"x":112.96875,"y":34.78125}},{"column":4688,"row":883,"value":11,"centerPoint":{"x":113.03125,"y":34.78125}},{"column":4689,"row":883,"value":10,"centerPoint":{"x":113.09375,"y":34.78125}},{"column":4690,"row":883,"value":10,"centerPoint":{"x":113.15625,"y":34.78125}},{"column":4691,"row":883,"value":11,"centerPoint":{"x":113.21875,"y":34.78125}},{"column":4692,"row":883,"value":10,"centerPoint":{"x":113.28125,"y":34.78125}},{"column":4693,"row":883,"value":11,"centerPoint":{"x":113.34375,"y":34.78125}}],[{"column":4678,"row":884,"value":11,"centerPoint":{"x":112.40625,"y":34.71875}},{"column":4679,"row":884,"value":11,"centerPoint":{"x":112.46875,"y":34.71875}},{"column":4680,"row":884,"value":11,"centerPoint":{"x":112.53125,"y":34.71875}},{"column":4681,"row":884,"value":11,"centerPoint":{"x":112.59375,"y":34.71875}},{"column":4682,"row":884,"value":11,"centerPoint":{"x":112.65625,"y":34.71875}},{"column":4683,"row":884,"value":11,"centerPoint":{"x":112.71875,"y":34.71875}},{"column":4684,"row":884,"value":11,"centerPoint":{"x":112.78125,"y":34.71875}},{"column":4685,"row":884,"value":11,"centerPoint":{"x":112.84375,"y":34.71875}},{"column":4686,"row":884,"value":11,"centerPoint":{"x":112.90625,"y":34.71875}},{"column":4687,"row":884,"value":11,"centerPoint":{"x":112.96875,"y":34.71875}},{"column":4688,"row":884,"value":11,"centerPoint":{"x":113.03125,"y":34.71875}},{"column":4689,"row":884,"value":11,"centerPoint":{"x":113.09375,"y":34.71875}},{"column":4690,"row":884,"value":10,"centerPoint":{"x
":113.15625,"y":34.71875}},{"column":4691,"row":884,"value":11,"centerPoint":{"x":113.21875,"y":34.71875}},{"column":4692,"row":884,"value":11,"centerPoint":{"x":113.28125,"y":34.71875}},{"column":4693,"row":884,"value":11,"centerPoint":{"x":113.34375,"y":34.71875}}],[]],"rowCount":17,"columnCount":17}]}'
bounds = DefaultValuesItem()
bounds.circle = Circle()
bounds.circle.centerPoint = Point2D()
bounds.circle.centerPoint.x = 108.012838
bounds.circle.centerPoint.y = 33.594138
bounds.circle.radius = 1
bounds2 = DefaultValuesItem()
bounds2.circle = Circle()
bounds2.circle.centerPoint = Point2D()
bounds2.circle.centerPoint.x = 112.351881
bounds2.circle.centerPoint.y = 35.673401
bounds2.circle.radius = 1
entity = [bounds2, bounds]
self.check_api(DataService.post_gridvalues,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/gridValues.json",
HttpMethod.POST, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', entity=entity)
jsonstr = '{"column":4677,"row":885,"value":11,"centerPoint":{"x":112.361881,"y":34.673401}}'
self.check_api(DataService.get_gridvalue,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/gridValue.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', x=112.361881, y=34.673401)
jsonstr = '{"valuesCount":36,"values":[[{"color":{"red":50,"green":64,"blue":28,"alpha":255},"column":1663,"bounds":null,"row":309,"value":3293212,"centerPoint":{"x":112.412109375,"y":35.595703125}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1664,"bounds":null,"row":309,"value":4212005,"centerPoint":{"x":112.587890625,"y":35.595703125}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1665,"bounds":null,"row":309,"value":4212005,"centerPoint":{"x":112.763671875,"y":35.595703125}},{"color":{"red":59,"green":66,"blue":33,"alpha":255},"column":1666,"bounds":null,"row":309,"value":3883553,"centerPoint":{"x":112.939453125,"y":35.595703125}},{"color":{"red":48,"green":55,"blue":22,"alpha":255},"column":1667,"bounds":null,"row":309,"value":3159830,"centerPoint":{"x":113.115234375,"y":35.595703125}},{"color":{"red":37,"green":44,"blue":11,"alpha":255},"column":1668,"bounds":null,"row":309,"value":2436107,"centerPoint":{"x":113.291015625,"y":35.595703125}}],[{"color":{"red":55,"green":67,"blue":29,"alpha":255},"column":1663,"bounds":null,"row":310,"value":3621661,"centerPoint":{"x":112.412109375,"y":35.419921875}},{"color":{"red":68,"green":73,"blue":41,"alpha":255},"column":1664,"bounds":null,"row":310,"value":4475177,"centerPoint":{"x":112.587890625,"y":35.419921875}},{"color":{"red":62,"green":67,"blue":35,"alpha":255},"column":1665,"bounds":null,"row":310,"value":4080419,"centerPoint":{"x":112.763671875,"y":35.419921875}},{"color":{"red":53,"green":58,"blue":26,"alpha":255},"column":1666,"bounds":null,"row":310,"value":3488282,"centerPoint":{"x":112.939453125,"y":35.419921875}},{"color":{"red":46,"green":51,"blue":19,"alpha":255},"column":1667,"bounds":null,"row":310,"value":3027731,"centerPoint":{"x":113.115234375,"y":35.419921875}},{"color":{"red":44,"green":51,"blue":18,"alpha":255},"column":1668,"bounds":null,"row":310,"value":2896658,"centerPoint":{"x":113.291015625,"y":35.419921875}}],[{"color":{"red":39,"green":51,"blue":13,"alpha":
255},"column":1663,"bounds":null,"row":311,"value":2568973,"centerPoint":{"x":112.412109375,"y":35.244140625}},{"color":{"red":43,"green":48,"blue":16,"alpha":255},"column":1664,"bounds":null,"row":311,"value":2830352,"centerPoint":{"x":112.587890625,"y":35.244140625}},{"color":{"red":45,"green":50,"blue":18,"alpha":255},"column":1665,"bounds":null,"row":311,"value":2961938,"centerPoint":{"x":112.763671875,"y":35.244140625}},{"color":{"red":52,"green":57,"blue":25,"alpha":255},"column":1666,"bounds":null,"row":311,"value":3422489,"centerPoint":{"x":112.939453125,"y":35.244140625}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1667,"bounds":null,"row":311,"value":4212005,"centerPoint":{"x":113.115234375,"y":35.244140625}},{"color":{"red":74,"green":79,"blue":47,"alpha":255},"column":1668,"bounds":null,"row":311,"value":4869935,"centerPoint":{"x":113.291015625,"y":35.244140625}}],[{"color":{"red":43,"green":55,"blue":19,"alpha":255},"column":1663,"bounds":null,"row":312,"value":2832147,"centerPoint":{"x":112.412109375,"y":35.068359375}},{"color":{"red":64,"green":68,"blue":35,"alpha":255},"column":1664,"bounds":null,"row":312,"value":4211747,"centerPoint":{"x":112.587890625,"y":35.068359375}},{"color":{"red":69,"green":73,"blue":40,"alpha":255},"column":1665,"bounds":null,"row":312,"value":4540712,"centerPoint":{"x":112.763671875,"y":35.068359375}},{"color":{"red":74,"green":78,"blue":45,"alpha":255},"column":1666,"bounds":null,"row":312,"value":4869677,"centerPoint":{"x":112.939453125,"y":35.068359375}},{"color":{"red":75,"green":81,"blue":47,"alpha":255},"column":1667,"bounds":null,"row":312,"value":4935983,"centerPoint":{"x":113.115234375,"y":35.068359375}},{"color":{"red":77,"green":83,"blue":49,"alpha":255},"column":1668,"bounds":null,"row":312,"value":5067569,"centerPoint":{"x":113.291015625,"y":35.068359375}}],[{"color":{"red":59,"green":71,"blue":35,"alpha":255},"column":1663,"bounds":null,"row":313,"value":3884835,"centerPoint":{"x":112.412109
375,"y":34.892578125}},{"color":{"red":70,"green":74,"blue":41,"alpha":255},"column":1664,"bounds":null,"row":313,"value":4606505,"centerPoint":{"x":112.587890625,"y":34.892578125}},{"color":{"red":75,"green":79,"blue":46,"alpha":255},"column":1665,"bounds":null,"row":313,"value":4935470,"centerPoint":{"x":112.763671875,"y":34.892578125}},{"color":{"red":77,"green":81,"blue":48,"alpha":255},"column":1666,"bounds":null,"row":313,"value":5067056,"centerPoint":{"x":112.939453125,"y":34.892578125}},{"color":{"red":75,"green":79,"blue":46,"alpha":255},"column":1667,"bounds":null,"row":313,"value":4935470,"centerPoint":{"x":113.115234375,"y":34.892578125}},{"color":{"red":72,"green":78,"blue":44,"alpha":255},"column":1668,"bounds":null,"row":313,"value":4738604,"centerPoint":{"x":113.291015625,"y":34.892578125}}],[{"color":{"red":74,"green":84,"blue":50,"alpha":255},"column":1663,"bounds":null,"row":314,"value":4871218,"centerPoint":{"x":112.412109375,"y":34.716796875}},{"color":{"red":73,"green":77,"blue":42,"alpha":255},"column":1664,"bounds":null,"row":314,"value":4803882,"centerPoint":{"x":112.587890625,"y":34.716796875}},{"color":{"red":76,"green":80,"blue":45,"alpha":255},"column":1665,"bounds":null,"row":314,"value":5001261,"centerPoint":{"x":112.763671875,"y":34.716796875}},{"color":{"red":73,"green":77,"blue":42,"alpha":255},"column":1666,"bounds":null,"row":314,"value":4803882,"centerPoint":{"x":112.939453125,"y":34.716796875}},{"color":{"red":65,"green":69,"blue":34,"alpha":255},"column":1667,"bounds":null,"row":314,"value":4277538,"centerPoint":{"x":113.115234375,"y":34.716796875}},{"color":{"red":62,"green":66,"blue":31,"alpha":255},"column":1668,"bounds":null,"row":314,"value":4080159,"centerPoint":{"x":113.291015625,"y":34.716796875}}]],"rowCount":6,"columnCount":6}'
bounds = DefaultValuesItem()
bounds.leftBottom = Point2D()
bounds.rightTop = Point2D()
bounds.leftBottom.x = 112.351881
bounds.leftBottom.y = 34.663401
bounds.rightTop.x = 113.361881
bounds.rightTop.y = 35.673401
self.check_api(DataService.get_imagevalues,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/imageValues.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', bounds=bounds)
jsonstr = '{"postResultType":"CreateChild","succeed":true,"customResult":[{"valuesCount":10,"values":[[],[{"color":{"red":44,"green":54,"blue":27,"alpha":255},"column":1637,"bounds":null,"row":323,"value":2897435,"centerPoint":{"x":107.841796875,"y":33.134765625}},{"color":{"red":38,"green":48,"blue":21,"alpha":255},"column":1638,"bounds":null,"row":323,"value":2502677,"centerPoint":{"x":108.017578125,"y":33.134765625}},{"color":{"red":35,"green":45,"blue":18,"alpha":255},"column":1639,"bounds":null,"row":323,"value":2305298,"centerPoint":{"x":108.193359375,"y":33.134765625}},{"color":{"red":32,"green":42,"blue":15,"alpha":255},"column":1640,"bounds":null,"row":323,"value":2107919,"centerPoint":{"x":108.369140625,"y":33.134765625}},{"color":{"red":28,"green":38,"blue":11,"alpha":255},"column":1641,"bounds":null,"row":323,"value":1844747,"centerPoint":{"x":108.544921875,"y":33.134765625}}],[{"color":{"red":35,"green":45,"blue":18,"alpha":255},"column":1637,"bounds":null,"row":324,"value":2305298,"centerPoint":{"x":107.841796875,"y":32.958984375}},{"color":{"red":33,"green":43,"blue":16,"alpha":255},"column":1638,"bounds":null,"row":324,"value":2173712,"centerPoint":{"x":108.017578125,"y":32.958984375}},{"color":{"red":31,"green":41,"blue":14,"alpha":255},"column":1639,"bounds":null,"row":324,"value":2042126,"centerPoint":{"x":108.193359375,"y":32.958984375}},{"color":{"red":35,"green":45,"blue":18,"alpha":255},"column":1640,"bounds":null,"row":324,"value":2305298,"centerPoint":{"x":108.369140625,"y":32.958984375}},{"color":{"red":33,"green":43,"blue":16,"alpha":255},"column":1641,"bounds":null,"row":324,"value":2173712,"centerPoint":{"x":108.544921875,"y":32.958984375}}]],"rowCount":3,"columnCount":7},{"valuesCount":36,"values":[[{"color":{"red":50,"green":64,"blue":28,"alpha":255},"column":1663,"bounds":null,"row":309,"value":3293212,"centerPoint":{"x":112.412109375,"y":35.595703125}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1664,"bounds":null,"
row":309,"value":4212005,"centerPoint":{"x":112.587890625,"y":35.595703125}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1665,"bounds":null,"row":309,"value":4212005,"centerPoint":{"x":112.763671875,"y":35.595703125}},{"color":{"red":59,"green":66,"blue":33,"alpha":255},"column":1666,"bounds":null,"row":309,"value":3883553,"centerPoint":{"x":112.939453125,"y":35.595703125}},{"color":{"red":48,"green":55,"blue":22,"alpha":255},"column":1667,"bounds":null,"row":309,"value":3159830,"centerPoint":{"x":113.115234375,"y":35.595703125}},{"color":{"red":37,"green":44,"blue":11,"alpha":255},"column":1668,"bounds":null,"row":309,"value":2436107,"centerPoint":{"x":113.291015625,"y":35.595703125}}],[{"color":{"red":55,"green":67,"blue":29,"alpha":255},"column":1663,"bounds":null,"row":310,"value":3621661,"centerPoint":{"x":112.412109375,"y":35.419921875}},{"color":{"red":68,"green":73,"blue":41,"alpha":255},"column":1664,"bounds":null,"row":310,"value":4475177,"centerPoint":{"x":112.587890625,"y":35.419921875}},{"color":{"red":62,"green":67,"blue":35,"alpha":255},"column":1665,"bounds":null,"row":310,"value":4080419,"centerPoint":{"x":112.763671875,"y":35.419921875}},{"color":{"red":53,"green":58,"blue":26,"alpha":255},"column":1666,"bounds":null,"row":310,"value":3488282,"centerPoint":{"x":112.939453125,"y":35.419921875}},{"color":{"red":46,"green":51,"blue":19,"alpha":255},"column":1667,"bounds":null,"row":310,"value":3027731,"centerPoint":{"x":113.115234375,"y":35.419921875}},{"color":{"red":44,"green":51,"blue":18,"alpha":255},"column":1668,"bounds":null,"row":310,"value":2896658,"centerPoint":{"x":113.291015625,"y":35.419921875}}],[{"color":{"red":39,"green":51,"blue":13,"alpha":255},"column":1663,"bounds":null,"row":311,"value":2568973,"centerPoint":{"x":112.412109375,"y":35.244140625}},{"color":{"red":43,"green":48,"blue":16,"alpha":255},"column":1664,"bounds":null,"row":311,"value":2830352,"centerPoint":{"x":112.587890625,"y":35.244140625}},{"color":{"
red":45,"green":50,"blue":18,"alpha":255},"column":1665,"bounds":null,"row":311,"value":2961938,"centerPoint":{"x":112.763671875,"y":35.244140625}},{"color":{"red":52,"green":57,"blue":25,"alpha":255},"column":1666,"bounds":null,"row":311,"value":3422489,"centerPoint":{"x":112.939453125,"y":35.244140625}},{"color":{"red":64,"green":69,"blue":37,"alpha":255},"column":1667,"bounds":null,"row":311,"value":4212005,"centerPoint":{"x":113.115234375,"y":35.244140625}},{"color":{"red":74,"green":79,"blue":47,"alpha":255},"column":1668,"bounds":null,"row":311,"value":4869935,"centerPoint":{"x":113.291015625,"y":35.244140625}}],[{"color":{"red":43,"green":55,"blue":19,"alpha":255},"column":1663,"bounds":null,"row":312,"value":2832147,"centerPoint":{"x":112.412109375,"y":35.068359375}},{"color":{"red":64,"green":68,"blue":35,"alpha":255},"column":1664,"bounds":null,"row":312,"value":4211747,"centerPoint":{"x":112.587890625,"y":35.068359375}},{"color":{"red":69,"green":73,"blue":40,"alpha":255},"column":1665,"bounds":null,"row":312,"value":4540712,"centerPoint":{"x":112.763671875,"y":35.068359375}},{"color":{"red":74,"green":78,"blue":45,"alpha":255},"column":1666,"bounds":null,"row":312,"value":4869677,"centerPoint":{"x":112.939453125,"y":35.068359375}},{"color":{"red":75,"green":81,"blue":47,"alpha":255},"column":1667,"bounds":null,"row":312,"value":4935983,"centerPoint":{"x":113.115234375,"y":35.068359375}},{"color":{"red":77,"green":83,"blue":49,"alpha":255},"column":1668,"bounds":null,"row":312,"value":5067569,"centerPoint":{"x":113.291015625,"y":35.068359375}}],[{"color":{"red":59,"green":71,"blue":35,"alpha":255},"column":1663,"bounds":null,"row":313,"value":3884835,"centerPoint":{"x":112.412109375,"y":34.892578125}},{"color":{"red":70,"green":74,"blue":41,"alpha":255},"column":1664,"bounds":null,"row":313,"value":4606505,"centerPoint":{"x":112.587890625,"y":34.892578125}},{"color":{"red":75,"green":79,"blue":46,"alpha":255},"column":1665,"bounds":null,"row":313,"value":
4935470,"centerPoint":{"x":112.763671875,"y":34.892578125}},{"color":{"red":77,"green":81,"blue":48,"alpha":255},"column":1666,"bounds":null,"row":313,"value":5067056,"centerPoint":{"x":112.939453125,"y":34.892578125}},{"color":{"red":75,"green":79,"blue":46,"alpha":255},"column":1667,"bounds":null,"row":313,"value":4935470,"centerPoint":{"x":113.115234375,"y":34.892578125}},{"color":{"red":72,"green":78,"blue":44,"alpha":255},"column":1668,"bounds":null,"row":313,"value":4738604,"centerPoint":{"x":113.291015625,"y":34.892578125}}],[{"color":{"red":74,"green":84,"blue":50,"alpha":255},"column":1663,"bounds":null,"row":314,"value":4871218,"centerPoint":{"x":112.412109375,"y":34.716796875}},{"color":{"red":73,"green":77,"blue":42,"alpha":255},"column":1664,"bounds":null,"row":314,"value":4803882,"centerPoint":{"x":112.587890625,"y":34.716796875}},{"color":{"red":76,"green":80,"blue":45,"alpha":255},"column":1665,"bounds":null,"row":314,"value":5001261,"centerPoint":{"x":112.763671875,"y":34.716796875}},{"color":{"red":73,"green":77,"blue":42,"alpha":255},"column":1666,"bounds":null,"row":314,"value":4803882,"centerPoint":{"x":112.939453125,"y":34.716796875}},{"color":{"red":65,"green":69,"blue":34,"alpha":255},"column":1667,"bounds":null,"row":314,"value":4277538,"centerPoint":{"x":113.115234375,"y":34.716796875}},{"color":{"red":62,"green":66,"blue":31,"alpha":255},"column":1668,"bounds":null,"row":314,"value":4080159,"centerPoint":{"x":113.291015625,"y":34.716796875}}]],"rowCount":6,"columnCount":6}]}'
bounds = DefaultValuesItem()
bounds.circle = Circle()
bounds.circle.centerPoint = Point2D()
bounds.circle.centerPoint.x = 108.012838
bounds.circle.centerPoint.y = 33.594138
bounds.circle.radius = 1
bounds2 = DefaultValuesItem()
bounds2.circle = Circle()
bounds2.circle.centerPoint = Point2D()
bounds2.circle.centerPoint.x = 112.351881
bounds2.circle.centerPoint.y = 35.673401
bounds2.circle.radius = 1
entity = [bounds2, bounds]
self.check_api(DataService.post_imagevalues,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/imageValues.json",
HttpMethod.POST, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', entity=entity)
jsonstr = '{"color":{"red":74,"green":84,"blue":50,"alpha":255},"column":1663,"bounds":null,"row":314,"value":4871218,"centerPoint":{"x":112.361881,"y":34.673401}}'
self.check_api(DataService.get_imagevalue,
self.baseuri + "/services/data-World/rest/data/datasources/World/datasets/continent_T/imageValue.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), datasourceName='World',
datasetName='continent_T', x=112.361881, y=34.673401)
jsonstr = '[{"resourceConfigID":null,"supportedMediaTypes":["application/xml","text/xml","application/json","application/fastjson","application/rjson","text/html","application/jsonp","application/x-java-serialized-object","application/geojson"],"path":"http://192.168.20.182:8090/iserver/services/data-World/rest/data/featureResults/8b9abf652a7442dab6e4b4877a09a906_8de35e1b2fe64fbfbbf6647a6caaaebb.json","name":"8b9abf652a7442dab6e4b4877a09a906_8de35e1b2fe64fbfbbf6647a6caaaebb","resourceType":"ArithResultResource"},{"resourceConfigID":null,"supportedMediaTypes":["application/xml","text/xml","application/json","application/fastjson","application/rjson","text/html","application/jsonp","application/x-java-serialized-object","application/geojson"],"path":"http://192.168.20.182:8090/iserver/services/data-World/rest/data/featureResults/8b9abf652a7442dab6e4b4877a09a906_3a3ea69440194352b4df976cb1aa40f6.json","name":"8b9abf652a7442dab6e4b4877a09a906_3a3ea69440194352b4df976cb1aa40f6","resourceType":"ArithResultResource"}]'
self.check_api(DataService.get_featureResults, self.baseuri + "/services/data-World/rest/data/featureResults.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr))
jsonstr = '{"postResultType":"CreateChild","newResourceID":"8b9abf652a7442dab6e4b4877a09a906_2a07c9dca97e4742898cfc6068b3f130","succeed":true,"newResourceLocation":"http://192.168.20.182:8090/iserver/services/data-World/rest/data/featureResults/8b9abf652a7442dab6e4b4877a09a906_2a07c9dca97e4742898cfc6068b3f130.json"}'
entity = PostFeatureResultsItem()
entity.getFeatureMode = GetFeatureMode.ID
entity.datasetNames = ["World:Capitals"]
entity.ids = ["1", "2", "3"]
self.check_api(DataService.post_featureResults, self.baseuri + "/services/data-World/rest/data/featureResults.json",
HttpMethod.POST, httpretty.Response(status=200, body=jsonstr), entity=entity)
jsonstr = '{"features":[],"featureUriList":["http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/name/World/datasets/name/Capitals/features/2","http://192.168.20.182:8090/iserver/services/data-World/rest/data/datasources/name/World/datasets/name/Capitals/features/3"],"totalCount":2,"featureCount":2}'
self.check_api(DataService.get_featureResult, self.baseuri + "/services/data-World/rest/data/featureResults/123.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), featureResultId='123')
jsonstr = '{"postResultType":"CreateChild","newResourceID":"8b9abf652a7442dab6e4b4877a09a906_89ad051daecc4b38ba0fb2ae26b0dabb","succeed":true,"newResourceLocation":"http://192.168.20.182:8090/iserver/services/data-World/rest/data/coordtransfer/8b9abf652a7442dab6e4b4877a09a906_89ad051daecc4b38ba0fb2ae26b0dabb.json"}'
entity = PostCoordtransferItem()
p1 = Point2D()
p1.x = 113.98533886568
p1.y = 4.39179640504
p2 = Point2D()
p2.x = 112.98533886568
p2.y = 3.39179640504
entity.sourcePoints = [p1, p2]
entity.sourceEpsgCode = 4326
entity.targetEpsgCode = 3857
self.check_api(DataService.post_coordtransfer, self.baseuri + "/services/data-World/rest/data/coordtransfer.json",
HttpMethod.POST, httpretty.Response(status=200, body=jsonstr), entity=entity)
jsonstr = '[{"center":{"x":1.2688789880426234E7,"y":489371.98562014726},"parts":[1],"style":null,"prjCoordSys":null,"id":0,"type":"POINT","partTopo":null,"points":[{"x":1.2688789880426234E7,"y":489371.98562014726}]},{"center":{"x":1.257747038963296E7,"y":377793.7702294255},"parts":[1],"style":null,"prjCoordSys":null,"id":0,"type":"POINT","partTopo":null,"points":[{"x":1.257747038963296E7,"y":377793.7702294255}]}]'
self.check_api(DataService.get_coordtransfer, self.baseuri + "/services/data-World/rest/data/coordtransfer/123.json",
HttpMethod.GET, httpretty.Response(status=200, body=jsonstr), coordtransferResult='123')
| 332.701695
| 28,315
| 0.652837
| 14,065
| 98,147
| 4.534945
| 0.046356
| 0.130942
| 0.115985
| 0.122429
| 0.912533
| 0.897294
| 0.886366
| 0.860843
| 0.824799
| 0.820566
| 0
| 0.230524
| 0.055213
| 98,147
| 294
| 28,316
| 333.833333
| 0.457338
| 0.000153
| 0
| 0.454861
| 0
| 0.208333
| 0.854697
| 0.730133
| 0
| 0
| 0
| 0.003401
| 0
| 1
| 0.006944
| false
| 0
| 0.034722
| 0
| 0.045139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
ce5c78334eb77fef9a9981ddbf769bed22b70ed4
| 4,921
|
py
|
Python
|
test_autolens/plot/test_get_visuals.py
|
Jammy2211/AutoLens
|
bc132a21d1a52248f08f198474e29f985e365d85
|
[
"MIT"
] | null | null | null |
test_autolens/plot/test_get_visuals.py
|
Jammy2211/AutoLens
|
bc132a21d1a52248f08f198474e29f985e365d85
|
[
"MIT"
] | 10
|
2017-12-22T11:39:33.000Z
|
2018-01-30T09:13:16.000Z
|
test_autolens/plot/test_get_visuals.py
|
Jammy2211/AutoLens
|
bc132a21d1a52248f08f198474e29f985e365d85
|
[
"MIT"
] | null | null | null |
from os import path
import pytest
import autolens.plot as aplt
from autolens.plot.get_visuals import GetVisuals2D
# Absolute directory containing this test module.
# NOTE(review): appears unused in this module — confirm against sibling test
# modules before removing.
directory = path.dirname(path.realpath(__file__))
@pytest.fixture(name="plot_path")
def make_profile_plotter_setup():
    """Return the output directory used for profile plots in these tests."""
    base_dir = path.dirname(path.realpath(__file__))
    return path.join(base_dir, "files", "plots", "profiles")
def test__2d__via_tracer(tracer_x2_plane_7x7, grid_2d_7x7):
    """Check that `GetVisuals2D.via_tracer_from` extracts visuals per the
    `Include2D` flags, for both image and source planes."""
    visuals = aplt.Visuals2D(vectors=2)

    # Case 1: all flags on, image plane (plane_index=0).
    include = aplt.Include2D(
        origin=True,
        border=True,
        light_profile_centres=True,
        mass_profile_centres=True,
        critical_curves=True,
    )
    via = GetVisuals2D(include=include, visuals=visuals).via_tracer_from(
        tracer=tracer_x2_plane_7x7, grid=grid_2d_7x7, plane_index=0
    )

    assert via.origin.in_list == [(0.0, 0.0)]
    assert (via.border == grid_2d_7x7.mask.border_grid_sub_1.binned).all()
    assert via.light_profile_centres.in_list == [
        tracer_x2_plane_7x7.galaxies[1].light_profile_0.centre
    ]
    assert via.mass_profile_centres.in_list == [
        tracer_x2_plane_7x7.galaxies[0].mass_profile_0.centre
    ]
    assert (
        via.critical_curves[0]
        == tracer_x2_plane_7x7.critical_curves_from(grid=grid_2d_7x7)[0]
    ).all()
    # Manually-set visuals pass through untouched.
    assert via.vectors == 2

    # Case 2: source plane (plane_index=1), caustics instead of critical curves.
    include = aplt.Include2D(
        origin=True,
        border=True,
        light_profile_centres=True,
        mass_profile_centres=True,
        caustics=True,
    )
    via = GetVisuals2D(include=include, visuals=visuals).via_tracer_from(
        tracer=tracer_x2_plane_7x7, grid=grid_2d_7x7, plane_index=1
    )

    assert via.origin.in_list == [(0.0, 0.0)]
    # For the source plane the border grid is ray-traced via the tracer.
    traced_border = tracer_x2_plane_7x7.traced_grid_2d_list_from(
        grid=grid_2d_7x7.mask.border_grid_sub_1.binned
    )[1]
    assert (via.border == traced_border).all()
    assert via.light_profile_centres.in_list == [
        tracer_x2_plane_7x7.galaxies[1].light_profile_0.centre
    ]
    assert via.mass_profile_centres is None
    assert (
        via.caustics[0]
        == tracer_x2_plane_7x7.caustics_from(grid=grid_2d_7x7)[0]
    ).all()

    # Case 3: all flags off — every extracted visual is None.
    include = aplt.Include2D(
        origin=False,
        border=False,
        light_profile_centres=False,
        mass_profile_centres=False,
        critical_curves=False,
    )
    via = GetVisuals2D(include=include, visuals=visuals).via_tracer_from(
        tracer=tracer_x2_plane_7x7, grid=grid_2d_7x7, plane_index=0
    )

    assert via.origin is None
    assert via.border is None
    assert via.light_profile_centres is None
    assert via.mass_profile_centres is None
    assert via.critical_curves is None
    assert via.vectors == 2
def test__via_fit_imaging_from(fit_imaging_x2_plane_7x7, grid_2d_7x7):
    """Check that `GetVisuals2D.via_fit_imaging_from` honours the `Include2D`
    flags when extracting visuals from an imaging fit."""
    visuals = aplt.Visuals2D(origin=(1.0, 1.0), vectors=2)

    # All flags on: every visual is extracted from the fit.
    include = aplt.Include2D(
        origin=True,
        mask=True,
        border=True,
        light_profile_centres=True,
        mass_profile_centres=True,
        critical_curves=True,
    )
    via = GetVisuals2D(include=include, visuals=visuals).via_fit_imaging_from(
        fit=fit_imaging_x2_plane_7x7
    )

    # A manually-set origin takes precedence over the fit's own origin.
    assert via.origin == (1.0, 1.0)
    assert (via.mask == fit_imaging_x2_plane_7x7.mask).all()
    assert (
        via.border == fit_imaging_x2_plane_7x7.mask.border_grid_sub_1.binned
    ).all()
    assert via.light_profile_centres.in_list == [(0.0, 0.0)]
    assert via.mass_profile_centres.in_list == [(0.0, 0.0)]
    assert (
        via.critical_curves[0]
        == fit_imaging_x2_plane_7x7.tracer.critical_curves_from(grid=grid_2d_7x7)[0]
    ).all()
    assert via.vectors == 2

    # All flags off: only the manually-set visuals survive.
    include = aplt.Include2D(
        origin=False,
        mask=False,
        border=False,
        light_profile_centres=False,
        mass_profile_centres=False,
        critical_curves=False,
    )
    via = GetVisuals2D(include=include, visuals=visuals).via_fit_imaging_from(
        fit=fit_imaging_x2_plane_7x7
    )

    assert via.origin == (1.0, 1.0)
    assert via.mask is None
    assert via.border is None
    assert via.light_profile_centres is None
    assert via.mass_profile_centres is None
    assert via.critical_curves is None
    assert via.vectors == 2
| 33.026846
| 89
| 0.697013
| 692
| 4,921
| 4.534682
| 0.099711
| 0.123327
| 0.137667
| 0.17782
| 0.900574
| 0.833333
| 0.805927
| 0.779477
| 0.76673
| 0.743467
| 0
| 0.051621
| 0.216623
| 4,921
| 148
| 90
| 33.25
| 0.762387
| 0
| 0
| 0.610169
| 0
| 0
| 0.006076
| 0
| 0
| 0
| 0
| 0
| 0.262712
| 1
| 0.025424
| false
| 0
| 0.033898
| 0.008475
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce81a4325cb49fc6522a6027564a6a4e8e89793d
| 25,852
|
py
|
Python
|
visualizations.py
|
EMBEDDIA/xl-user-comments
|
ac70858ff9451e31bf5faa30700974291ce8ebfd
|
[
"Unlicense"
] | 1
|
2021-04-20T00:04:02.000Z
|
2021-04-20T00:04:02.000Z
|
visualizations.py
|
azagsam/xl-user-comments
|
ac70858ff9451e31bf5faa30700974291ce8ebfd
|
[
"Unlicense"
] | null | null | null |
visualizations.py
|
azagsam/xl-user-comments
|
ac70858ff9451e31bf5faa30700974291ce8ebfd
|
[
"Unlicense"
] | null | null | null |
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from sklearn.cluster import KMeans, MeanShift
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
import umap
from sklearn.mixture import GaussianMixture
from sklearn.metrics.pairwise import cosine_similarity
import networkx as nx
from sklearn.preprocessing import MinMaxScaler
from utils.helpers import load_sentences_from_file, load_sentences_from_AutoSentiNews, \
load_sentences_from_cro_comments, scale_centrality_scores, \
get_context_for_sentences
from utils.encoders import SentenceBERT, CMLM, LaBSE, LASER
import json
import numpy as np
from sentence_transformers import SentenceTransformer, util
from tqdm import tqdm
import os
from utils.LexRank import degree_centrality_scores
def plot_clustering_gm(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, cluster_params):
    """Scatter-plot sentences in 2D, colored by GaussianMixture cluster assignment.

    Encodes *sentences* with *encoder*, projects the embeddings to 2D with
    *reduction_method*, fits a GaussianMixture on the projected points and
    renders an interactive plotly scatter with the mixture means drawn as
    black crosses.

    NOTE(review): the plot title references the module-global ``i`` (the file
    index set in ``__main__``) — calling this outside that loop raises NameError.
    """
    print(encoder)
    embeddings = encoder().encode_sentences(sentences)
    # Project embeddings down to two dimensions for plotting.
    print(reduction_method)
    reducer = reduction_method(**reduction_method_params)
    points = reducer.fit_transform(embeddings)
    # Cluster in the reduced space.
    mixture = GaussianMixture(**cluster_params).fit(points)
    assignments = mixture.predict(points)
    frame = pd.DataFrame(
        {
            'x': [p[0] for p in points],
            'y': [p[1] for p in points],
            'cluster': [f'Cluster {c}' for c in assignments],
            'sentences': sentences,
        }
    )
    fig = px.scatter(
        frame,
        x='x',
        y='y',
        hover_name='sentences',
        color='cluster',
        title=f'<b>Dimensionality reduced by {rec_method_name}, colored with GaussianMixture File{i}</b>',
    )
    fig.update_traces(
        marker=dict(size=12, line=dict(width=2, color='DarkSlateGrey')),
        selector=dict(mode='markers'),
    )
    # Overlay the mixture component means as black crosses.
    fig.add_trace(
        go.Scatter(
            x=[m[0] for m in mixture.means_],
            y=[m[1] for m in mixture.means_],
            showlegend=False,
            hovertext=list(range(len(mixture.means_))),
            mode='markers',
            marker=dict(color='Black', size=16, symbol='x'),
        )
    )
    fig.show()
def plot_clustering_gm_show_text(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, cluster_params):
    """Same as plot_clustering_gm, but also renders each sentence as text below its marker.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # reduce dimensionality to 2D for plotting
    print(reduction_method)
    rm = reduction_method(**reduction_method_params)
    reduced_dim_embeddings = rm.fit_transform(embeddings)
    # cluster the projected points with a Gaussian mixture
    gm = GaussianMixture(**cluster_params).fit(reduced_dim_embeddings)
    clustering = gm.predict(reduced_dim_embeddings)
    # plot the points
    xs = [x for x, _ in reduced_dim_embeddings]
    ys = [y for _, y in reduced_dim_embeddings]
    labels = [f'Cluster {c}' for c in clustering]
    df = pd.DataFrame(
        {
            'x': xs,
            'y': ys,
            'cluster': labels,
            'sentences': sentences
        }
    )
    fig = px.scatter(df,
                     x='x',
                     y='y',
                     text='sentences',
                     color='cluster',
                     title=f'<b>Dimensionality reduced by {rec_method_name}, colored with GaussianMixture File{i}</b>'
                     )
    fig.update_traces(textposition='bottom center')
    fig.update_traces(marker=dict(size=12,
                                  line=dict(width=2,
                                            color='DarkSlateGrey')),
                      selector=dict(mode='markers'),
                      )
    # overlay the mixture component means as black crosses
    fig.add_trace(
        go.Scatter(
            x=[x for x, _ in gm.means_],
            y=[y for _, y in gm.means_],
            showlegend=False,
            hovertext=list(range(len(gm.means_))),
            mode='markers',
            marker=dict(
                color='Black',
                size=16,
                symbol='x',
            )
        )
    )
    fig.show()
def plot_clustering_kmeans(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, cluster_params):
    """Scatter-plot sentences in 2D, colored by KMeans cluster assignment.

    Encodes *sentences* with *encoder*, projects the embeddings to 2D with
    *reduction_method*, fits KMeans on the projected points and renders an
    interactive plotly scatter with the cluster centers drawn as black crosses.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this outside that loop raises NameError.
    """
    print(encoder)
    embeddings = encoder().encode_sentences(sentences)
    # Project embeddings down to two dimensions for plotting.
    print(reduction_method)
    reducer = reduction_method(**reduction_method_params)
    points = reducer.fit_transform(embeddings)
    # Cluster in the reduced space.
    kmeans = KMeans(**cluster_params).fit(points)
    frame = pd.DataFrame(
        {
            'x': [p[0] for p in points],
            'y': [p[1] for p in points],
            'cluster': [f'Cluster {c}' for c in kmeans.labels_],
            'sentences': sentences,
        }
    )
    fig = px.scatter(
        frame,
        x='x',
        y='y',
        hover_name='sentences',
        color='cluster',
        title=f'<b>Dimensionality reduced by {rec_method_name}, colored with KMeans File{i} </b>',
    )
    fig.update_traces(
        marker=dict(size=12, line=dict(width=2, color='DarkSlateGrey')),
        selector=dict(mode='markers'),
    )
    # Overlay the cluster centers as black crosses.
    fig.add_trace(
        go.Scatter(
            x=[c[0] for c in kmeans.cluster_centers_],
            y=[c[1] for c in kmeans.cluster_centers_],
            showlegend=False,
            hovertext=list(range(len(kmeans.cluster_centers_))),
            mode='markers',
            marker=dict(color='Black', size=16, symbol='x'),
        )
    )
    fig.show()
def texrank_barplot(sentences, encoder, rec_method_name, reduction_method, reduction_method_params):
    """Render a horizontal bar chart of TextRank (PageRank) scores, one bar per sentence.

    The reduction_method / reduction_method_params arguments are currently
    unused — the dimensionality-reduction step below is commented out, so
    scores are computed on the full-dimensional embeddings.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # # reduce dimensionality
    # print(reduction_method)
    # if rec_method_name != 'None':
    #     rm = reduction_method(**reduction_method_params)
    #     embeddings = rm.fit_transform(embeddings)
    # pairwise cosine similarity matrix; zero the diagonal so a sentence
    # does not vote for itself
    sim_mat = cosine_similarity(embeddings)
    np.fill_diagonal(sim_mat, 0)
    # rescale all similarities into [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    sim_mat = scaler.fit_transform(sim_mat.flatten().reshape(-1, 1)).reshape(len(embeddings), len(embeddings))
    np.fill_diagonal(sim_mat, 0)
    # calculate pagerank over the similarity graph
    nx_graph = nx.from_numpy_array(sim_mat)
    scores = nx.pagerank(nx_graph, max_iter=500)  # number of cycles to converge
    # ranked_sentences = sorted(((scores[i], s) for i, s in enumerate(sentences)), reverse=True)
    score_list = [scores[sent_idx] for sent_idx in range(len(sentences))]
    df = pd.DataFrame(
        {
            'scores': score_list,
            'sentences': sentences,
        }
    )
    fig = px.bar(df,
                 y=list(range(len(sentences))),
                 x='scores',
                 text='sentences',
                 orientation='h',
                 color='scores',
                 color_continuous_scale='OrRd',
                 title=f'<b>TextRank - Bar plot File{i}</b>'
                 )
    fig.update_traces(  # texttemplate='%{text:.2s}',
        textposition='inside')
    # first sentence at the top of the chart
    fig.update_yaxes(autorange="reversed",
                     )
    fig.show()
def lexrank_barplot(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, q):
    """Render a horizontal bar chart of LexRank centrality scores, one bar per sentence.

    The reduction_method / reduction_method_params and q arguments are
    currently unused — the corresponding steps below are commented out.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # # reduce dimensionality
    # print(reduction_method)
    # if rec_method_name != 'None':
    #     rm = reduction_method(**reduction_method_params)
    #     embeddings = rm.fit_transform(embeddings)
    # Compute the pair-wise cosine similarities
    cos_scores = util.pytorch_cos_sim(embeddings.numpy(), embeddings.numpy())
    # rescale all similarities into [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    cos_scores = scaler.fit_transform(cos_scores.flatten().reshape(-1, 1)).reshape(len(embeddings), len(embeddings))
    # Compute the centrality for each sentence
    centrality_scores = degree_centrality_scores(cos_scores, threshold=0.2)
    # # scale for visualization purposes
    # centrality_scores = scale_centrality_scores(centrality_scores, q=q)
    df = pd.DataFrame(
        {
            'centrality': centrality_scores,
            'sentences': sentences,
        }
    )
    fig = px.bar(df,
                 y=list(range(len(sentences))),
                 x='centrality',
                 text='sentences',
                 orientation='h',
                 color='centrality',
                 color_continuous_scale='OrRd',
                 title=f'<b>LexRank - Bar plot File{i}</b>'
                 )
    fig.update_traces(textposition='inside')
    # first sentence at the top of the chart
    fig.update_yaxes(autorange="reversed")
    fig.show()
def lexrank_nodes_edges(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, q):
    """Draw the LexRank similarity graph: nodes are sentences, edges strong similarities.

    Node size and color encode the LexRank centrality score; node positions
    come from projecting the embeddings to 2D with *reduction_method*. Only
    edges whose rescaled similarity exceeds 0.5 are drawn.

    Args:
        q: optional quantile passed to scale_centrality_scores to rescale
            centralities for display; falsy values skip the rescaling.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # Compute the pair-wise cosine similarities
    cos_scores = util.pytorch_cos_sim(embeddings.numpy(), embeddings.numpy())
    # rescale all similarities into [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    cos_scores = scaler.fit_transform(cos_scores.flatten().reshape(-1, 1)).reshape(len(embeddings), len(embeddings))
    # Compute the centrality for each sentence
    centrality_scores = degree_centrality_scores(cos_scores, threshold=0.2)
    # scale for visualization purposes
    if q:
        centrality_scores = scale_centrality_scores(centrality_scores, q=q)
    # We argsort so that the first element is the sentence with the highest score
    # most_central_sentence_indices = np.argsort(-centrality_scores)
    # reduce dimensionality to get 2D node positions
    print(rec_method_name)
    assert rec_method_name != 'None'
    rm = reduction_method(**reduction_method_params)
    pos = rm.fit_transform(embeddings)
    # get weights
    weights = cos_scores
    np.fill_diagonal(weights, 0)
    G = nx.from_numpy_array(weights)
    # collect edge segments; None entries break the line between edges
    edge_x = []
    edge_y = []
    for edge in G.edges():
        x0, y0 = pos[edge[0]]
        x1, y1 = pos[edge[1]]
        # only draw sufficiently strong similarities
        if weights[edge[0], edge[1]] > 0.5:
            edge_x.append(x0)
            edge_x.append(x1)
            edge_x.append(None)
            edge_y.append(y0)
            edge_y.append(y1)
            edge_y.append(None)
    edge_trace = go.Scatter(
        x=edge_x, y=edge_y,
        line=dict(width=0.75,
                  color='#888'),
        hoverinfo='none',
        mode='lines')
    node_x = []
    node_y = []
    for node in G.nodes():
        x, y = pos[node]
        node_x.append(x)
        node_y.append(y)
    node_trace = go.Scatter(
        x=node_x, y=node_y,
        mode='markers',
        hoverinfo='text',
        marker=dict(
            showscale=True,
            # colorscale options
            # 'Greys' | 'YlGnBu' | 'Greens' | 'YlOrRd' | 'Bluered' | 'RdBu' |
            # 'Reds' | 'Blues' | 'Picnic' | 'Rainbow' | 'Portland' | 'Jet' |
            # 'Hot' | 'Blackbody' | 'Earth' | 'Electric' | 'Viridis' |
            colorscale='Reds',
            # reversescale=True,
            color=[],
            size=[s * 10 for s in centrality_scores],
            colorbar=dict(
                thickness=15,
                title='Centrality Score',
                xanchor='left',
                titleside='right'
            ),
            line_width=1))
    # color each node by its centrality and show the sentence on hover
    node_adjacencies = []
    node_text = []
    for node, weight in enumerate(centrality_scores):
        node_adjacencies.append(weight)
        node_text.append(sentences[node])
    node_trace.marker.color = node_adjacencies
    node_trace.text = node_text
    fig = go.Figure(data=[edge_trace, node_trace],
                    layout=go.Layout(
                        title=f'<b>LexRank Summarization File{i}</b>',
                        showlegend=False,
                        hovermode='closest',
                        margin=dict(b=20, l=5, r=5, t=40),
                        # annotations=[ dict(
                        #     #text="Python code: <a href='https://plotly.com/ipython-notebooks/network-graphs/'>
                        #     # https://plotly.com/ipython-notebooks/network-graphs/</a>",
                        #     showarrow=False,
                        #     xref="paper", yref="paper",
                        #     x=0.005, y=-0.002 ) ],
                        # xaxis=dict(showgrid=False, zeroline=False, showticklabels=False),
                        # yaxis=dict(showgrid=False, zeroline=False, showticklabels=False))
                    ))
    fig.show()
def textrank_nodes_edges(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, q):
    """Draw the TextRank similarity graph: nodes are sentences, edges strong similarities.

    Node size and color encode the PageRank score; node positions come from
    projecting the embeddings to 2D with *reduction_method*. Only edges whose
    rescaled similarity exceeds 0.8 are drawn; hovering a node shows the
    sentence together with its neighbouring sentences as context.

    Args:
        q: optional quantile passed to scale_centrality_scores to rescale
            scores for display; falsy values skip the rescaling.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # # reduce dimensionality
    # print(reduction_method)
    # if rec_method_name != 'None':
    #     rm = reduction_method(**reduction_method_params)
    #     embeddings = rm.fit_transform(embeddings)
    # similarity matrix with zeroed diagonal (no self-votes)
    sim_mat = cosine_similarity(embeddings)
    np.fill_diagonal(sim_mat, 0)
    # rescale all similarities into [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    sim_mat = scaler.fit_transform(sim_mat.flatten().reshape(-1, 1)).reshape(len(embeddings), len(embeddings))
    np.fill_diagonal(sim_mat, 0)
    # calculate pagerank
    nx_graph = nx.from_numpy_array(sim_mat)
    scores = nx.pagerank(nx_graph, max_iter=500)  # number of cycles to converge
    # ranked_sentences = sorted(((scores[i], s) for i, s in enumerate(sentences)), reverse=True)
    score_list = [scores[sent_idx] for sent_idx in range(len(sentences))]
    # reduce dimensionality to get 2D node positions
    print(rec_method_name)
    assert rec_method_name != 'None'
    rm = reduction_method(**reduction_method_params)
    pos = rm.fit_transform(embeddings)
    # get weights
    weights = sim_mat
    centrality_scores = np.array(score_list)
    # scale scores for display
    if q:
        centrality_scores = scale_centrality_scores(centrality_scores, q=q)
    np.fill_diagonal(weights, 0)
    G = nx.from_numpy_array(weights)
    # collect edge segments; None entries break the line between edges
    edge_x = []
    edge_y = []
    for edge in G.edges():
        x0, y0 = pos[edge[0]]
        x1, y1 = pos[edge[1]]
        # only draw sufficiently strong similarities
        if weights[edge[0], edge[1]] > 0.8:
            edge_x.append(x0)
            edge_x.append(x1)
            edge_x.append(None)
            edge_y.append(y0)
            edge_y.append(y1)
            edge_y.append(None)
    edge_trace = go.Scatter(
        x=edge_x, y=edge_y,
        line=dict(width=1,
                  color='#888'),
        hoverinfo='none',
        mode='lines')
    node_x = []
    node_y = []
    for node in G.nodes():
        x, y = pos[node]
        node_x.append(x)
        node_y.append(y)
    node_trace = go.Scatter(
        x=node_x, y=node_y,
        mode='markers',
        hoverinfo='text',
        marker=dict(
            showscale=True,
            # colorscale options
            # 'Greys' | 'YlGnBu' | 'Greens' | 'YlOrRd' | 'Bluered' | 'RdBu' |
            # 'Reds' | 'Blues' | 'Picnic' | 'Rainbow' | 'Portland' | 'Jet' |
            # 'Hot' | 'Blackbody' | 'Earth' | 'Electric' | 'Viridis' |
            colorscale='Greens',
            # reversescale=True,
            color=[],
            size=[s * 10 for s in centrality_scores],
            colorbar=dict(
                thickness=15,
                title='Score',
                xanchor='left',
                titleside='right'
            ),
            line_width=1))
    # wrap a sentence with neighbour sentences
    context = get_context_for_sentences(sentences)
    # color each node by its score and show its context on hover
    node_adjacencies = []
    node_text = []
    for node, weight in enumerate(centrality_scores):
        node_adjacencies.append(weight)
        node_text.append(context[node])
    node_trace.marker.color = node_adjacencies
    node_trace.text = node_text
    fig = go.Figure(data=[edge_trace, node_trace],
                    layout=go.Layout(
                        title=f'<b>TextRank visualization with {rec_method_name} File{i}</b>',
                        showlegend=False,
                        hovermode='closest',
                        margin=dict(b=20, l=5, r=5, t=40),
                        # annotations=[ dict(
                        #     #text="Python code: <a href='https://plotly.com/ipython-notebooks/network-graphs/'>
                        #     # https://plotly.com/ipython-notebooks/network-graphs/</a>",
                        #     showarrow=False,
                        #     xref="paper", yref="paper",
                        #     x=0.005, y=-0.002 ) ],
                        # xaxis=dict(showgrid=False, zeroline=False, showticklabels=False),
                        # yaxis=dict(showgrid=False, zeroline=False, showticklabels=False))
                    ))
    fig.show()
def lexrank_nodes_edges_show_text(sentences, encoder, rec_method_name, reduction_method, reduction_method_params, q):
    """Like lexrank_nodes_edges, but prints the text of high-scoring sentences on the plot.

    Sentences whose (optionally rescaled) centrality exceeds 8.0 are rendered
    as labels below their node; all others get an 'X' placeholder in the
    console dump and an empty label on the figure.

    NOTE(review): the 8.0 threshold only makes sense after
    scale_centrality_scores has been applied (q truthy) — raw centralities
    appear to be on a much smaller scale; confirm before calling with q=None.

    NOTE(review): the plot title references the module-global ``i`` set in
    ``__main__`` — calling this function outside that loop raises NameError.
    """
    print(encoder)
    sentence_encoder = encoder()
    # encode the sentences into embedding vectors
    embeddings = sentence_encoder.encode_sentences(sentences)
    # Compute the pair-wise cosine similarities
    cos_scores = util.pytorch_cos_sim(embeddings.numpy(), embeddings.numpy())
    # rescale all similarities into [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    cos_scores = scaler.fit_transform(cos_scores.flatten().reshape(-1, 1)).reshape(len(embeddings), len(embeddings))
    # Compute the centrality for each sentence
    centrality_scores = degree_centrality_scores(cos_scores, threshold=0.2)
    # scale for visualization purposes
    if q:
        centrality_scores = scale_centrality_scores(centrality_scores, q=q)
    # We argsort so that the first element is the sentence with the highest score
    # most_central_sentence_indices = np.argsort(-centrality_scores)
    # reduce dimensionality to get 2D node positions
    print(rec_method_name)
    assert rec_method_name != 'None'
    rm = reduction_method(**reduction_method_params)
    pos = rm.fit_transform(embeddings)
    # get weights
    weights = cos_scores
    np.fill_diagonal(weights, 0)
    G = nx.from_numpy_array(weights)
    # collect edge segments; None entries break the line between edges
    edge_x = []
    edge_y = []
    for edge in G.edges():
        x0, y0 = pos[edge[0]]
        x1, y1 = pos[edge[1]]
        # only draw sufficiently strong similarities
        if weights[edge[0], edge[1]] > 0.5:
            edge_x.append(x0)
            edge_x.append(x1)
            edge_x.append(None)
            edge_y.append(y0)
            edge_y.append(y1)
            edge_y.append(None)
    edge_trace = go.Scatter(
        x=edge_x, y=edge_y,
        line=dict(width=0.75,
                  color='#888'),
        hoverinfo='none',
        mode='lines')
    node_x = []
    node_y = []
    for node in G.nodes():
        x, y = pos[node]
        node_x.append(x)
        node_y.append(y)
    # show only the most central sentences as text; 'X' marks the rest
    important_sentences = [sent if weight > 8.0 else 'X' for sent, weight in zip(sentences, centrality_scores)]
    print(important_sentences)
    node_trace = go.Scatter(
        x=node_x, y=node_y,
        # mode='markers',
        # hoverinfo='text',
        mode="markers+text",
        name="Markers and Text",
        text=important_sentences,
        textposition="bottom center",
        marker=dict(
            showscale=True,
            # colorscale options
            # 'Greys' | 'YlGnBu' | 'Greens' | 'YlOrRd' | 'Bluered' | 'RdBu' |
            # 'Reds' | 'Blues' | 'Picnic' | 'Rainbow' | 'Portland' | 'Jet' |
            # 'Hot' | 'Blackbody' | 'Earth' | 'Electric' | 'Viridis' |
            colorscale='Reds',
            # reversescale=True,
            color=[],
            size=[s * 10 for s in centrality_scores],
            colorbar=dict(
                thickness=15,
                title='Centrality Score',
                xanchor='left',
                titleside='right'
            ),
            line_width=1))
    # node color = centrality; node label = sentence only above the threshold
    node_adjacencies = []
    node_text = []
    for node, weight in enumerate(centrality_scores):
        node_adjacencies.append(weight)
        if weight > 8.0:
            node_text.append(sentences[node])
        else:
            node_text.append('')
    node_trace.marker.color = node_adjacencies
    node_trace.text = node_text
    fig = go.Figure(data=[edge_trace, node_trace],
                    layout=go.Layout(
                        title=f'<b>LexRank Summarization File{i}</b>',
                        showlegend=False,
                        hovermode='closest',
                        margin=dict(b=20, l=5, r=5, t=40),
                        # annotations=[ dict(
                        #     #text="Python code: <a href='https://plotly.com/ipython-notebooks/network-graphs/'>
                        #     # https://plotly.com/ipython-notebooks/network-graphs/</a>",
                        #     showarrow=False,
                        #     xref="paper", yref="paper",
                        #     x=0.005, y=-0.002 ) ],
                        # xaxis=dict(showgrid=False, zeroline=False, showticklabels=False),
                        # yaxis=dict(showgrid=False, zeroline=False, showticklabels=False))
                    ))
    fig.show()
def load_sentences(name, **kwargs):
    """Dispatch to a corpus-specific sentence loader.

    Args:
        name: one of 'single_file' (expects kwargs['file']), 'asn' or
            'cro_comments' (both expect kwargs['idx']).
        **kwargs: loader-specific arguments, see above.

    Returns:
        The sentences produced by the selected loader.

    Raises:
        ValueError: if *name* is not a known corpus name. (Previously an
            unknown name silently fell through and returned None, which only
            surfaced as a confusing crash later in the caller.)
    """
    if name == 'single_file':
        return load_sentences_from_file(file=kwargs['file'])
    if name == 'asn':
        return load_sentences_from_AutoSentiNews(idx=kwargs['idx'])
    if name == 'cro_comments':
        return load_sentences_from_cro_comments(idx=kwargs['idx'])
    raise ValueError(f"Unknown corpus name: {name!r}")
if __name__ == '__main__':
    # Sentence encoders and dimensionality reduction methods
    encoders = {
        'SentenceBERT': SentenceBERT,
        'CMLM': CMLM,
        'LaBSE': LaBSE,
        # 'LASER': LASER  # Todo: NOTE: you have to specify language because the model uses different tokenizers
    }
    reduction_methods = {
        'pca': PCA,
        'umap': umap.UMAP,
        't-sne': TSNE,
        'None': None
    }
    reduction_methods_params = {
        'pca': {'n_components': 2},
        'umap': {'n_neighbors': 5, 'random_state': 42},  # check neighbors parameter
        't-sne': {'n_components': 2, 'perplexity': 30, 'random_state': 42},
        'None': None
    }
    cluster_params = {
        'kmeans': {'n_clusters': 3, 'random_state': 0},
        'gaussian_mixture': {'n_components': 3, 'covariance_type': 'full'}
    }
    # import data; note that `i` is read inside the plotting functions' titles
    for i in range(100):
        # sentences = load_sentences(name='single_file', file='data/cluster-translations.txt')
        # NOTE(review): 'cro_comments_vecernji' is not a name handled by
        # load_sentences (it knows 'single_file', 'asn', 'cro_comments'),
        # so this call cannot succeed as written — probably 'cro_comments'
        # was intended; confirm before running.
        sentences = load_sentences(name='cro_comments_vecernji', idx=i)
        # if len(sentences) < 10 or len(sentences) > 30:
        #     continue
        # plot clustering
        enc_name = 'SentenceBERT'
        rec_method_name = 'None'
        # # # TODO 1/12/21 Clustering
        # plot_clustering_gm(sentences, encoders[enc_name],
        #                    rec_method_name,
        #                    reduction_methods[rec_method_name],
        #                    reduction_methods_params[rec_method_name],
        #                    cluster_params['gaussian_mixture'])
        #
        # plot_clustering_gm_show_text(sentences, encoders[enc_name],
        #                              rec_method_name,
        #                              reduction_methods[rec_method_name],
        #                              reduction_methods_params[rec_method_name],
        #                              cluster_params['gaussian_mixture'])
        # plot_clustering_kmeans(sentences, encoders[enc_name],
        #                        rec_method_name,
        #                        reduction_methods[rec_method_name],
        #                        reduction_methods_params[rec_method_name],
        #                        cluster_params['kmeans'])
        # # # # TODO 1/12/21 Barplots
        texrank_barplot(sentences, encoders[enc_name],
                        rec_method_name,
                        reduction_methods[rec_method_name],
                        reduction_methods_params[rec_method_name])
        # lexrank_barplot(sentences, encoders[enc_name],
        #                 rec_method_name,
        #                 reduction_methods[rec_method_name],
        #                 reduction_methods_params[rec_method_name],
        #                 q=0.5)  # centality score scaler
        # # TODO 1/12/21 Nodes + Edges
        # lexrank_nodes_edges(sentences, encoders[enc_name],
        #                     rec_method_name,
        #                     reduction_methods[rec_method_name],
        #                     reduction_methods_params[rec_method_name],
        #                     q=0.5)  # score scaler
        # lexrank_nodes_edges_show_text(sentences, encoders[enc_name],
        #                               rec_method_name,
        #                               reduction_methods[rec_method_name],
        #                               reduction_methods_params[rec_method_name],
        #                               q=0.5)  # score scaler
        #
        # textrank_nodes_edges(sentences, encoders[enc_name],
        #                      rec_method_name,
        #                      reduction_methods[rec_method_name],
        #                      reduction_methods_params[rec_method_name],
        #                      q=0.25)  # score scaler
| 33.971091
| 129
| 0.570981
| 2,802
| 25,852
| 5.051749
| 0.120985
| 0.029248
| 0.042247
| 0.037301
| 0.840198
| 0.826351
| 0.819993
| 0.814765
| 0.807983
| 0.804097
| 0
| 0.012349
| 0.320285
| 25,852
| 760
| 130
| 34.015789
| 0.793194
| 0.232322
| 0
| 0.723141
| 0
| 0
| 0.067206
| 0.001068
| 0
| 0
| 0
| 0.001316
| 0.006198
| 1
| 0.018595
| false
| 0
| 0.045455
| 0
| 0.070248
| 0.030992
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c74346b49c1db11fc6fa39bcb8d4214cac551c8
| 3,222
|
py
|
Python
|
stix2generator/test/test_object_generator_integer.py
|
majacQ/cti-stix-generator
|
7465ecd29ef6caabf9f1b60ad45dad789c475028
|
[
"BSD-3-Clause"
] | 20
|
2020-12-10T18:16:28.000Z
|
2022-02-20T19:30:53.000Z
|
stix2generator/test/test_object_generator_integer.py
|
majacQ/cti-stix-generator
|
7465ecd29ef6caabf9f1b60ad45dad789c475028
|
[
"BSD-3-Clause"
] | 26
|
2021-01-13T23:32:19.000Z
|
2022-03-29T06:47:02.000Z
|
stix2generator/test/test_object_generator_integer.py
|
majacQ/cti-stix-generator
|
7465ecd29ef6caabf9f1b60ad45dad789c475028
|
[
"BSD-3-Clause"
] | 8
|
2020-12-14T23:10:16.000Z
|
2021-12-06T13:07:24.000Z
|
import pytest
import stix2generator.exceptions
def test_integer_closed(object_generator, num_trials):
    """Closed interval [-1, 1]: generated integers respect both inclusive bounds."""
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": -1, "maximum": 1}
        assert -1 <= object_generator.generate_from_spec(spec) <= 1
def test_integer_open(object_generator, num_trials):
    """Open interval (-1, 1): 0 is the only integer strictly inside."""
    for _ in range(num_trials):
        spec = {"type": "integer", "exclusiveMinimum": -1, "exclusiveMaximum": 1}
        assert object_generator.generate_from_spec(spec) == 0
def test_integer_half_open_lower(object_generator, num_trials):
    """Half-open interval (-1, 1]: lower bound excluded, upper bound included."""
    for _ in range(num_trials):
        spec = {"type": "integer", "exclusiveMinimum": -1, "maximum": 1}
        assert -1 < object_generator.generate_from_spec(spec) <= 1
def test_integer_half_open_upper(object_generator, num_trials):
    """Half-open interval [-1, 1): lower bound included, upper bound excluded."""
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": -1, "exclusiveMaximum": 1}
        assert -1 <= object_generator.generate_from_spec(spec) < 1
def test_integer_float_bounds(object_generator, num_trials):
    """Float bounds [-1.5, 1.5] must yield true ints in the integer range [-1, 1]."""
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": -1.5, "maximum": 1.5}
        value = object_generator.generate_from_spec(spec)
        assert -1 <= value <= 1
        assert isinstance(value, int)
def test_integer_bounds_single_int(object_generator, num_trials):
    """Degenerate interval [1, 1] admits exactly one value."""
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": 1, "maximum": 1}
        assert object_generator.generate_from_spec(spec) == 1
def test_integer_float_bounds_single_int(object_generator, num_trials):
    """Float bounds [0.5, 1.5] contain exactly one integer: 1."""
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": 0.5, "maximum": 1.5}
        value = object_generator.generate_from_spec(spec)
        assert value == 1
        assert isinstance(value, int)
def test_integer_empty_interval(object_generator):
    """Open interval (1, 1) contains no integer, so generation must fail."""
    spec = {"type": "integer", "exclusiveMinimum": 1, "exclusiveMaximum": 1}
    with pytest.raises(stix2generator.exceptions.ObjectGenerationError):
        object_generator.generate_from_spec(spec)
def test_integer_empty_interval_float_bounds(object_generator):
    """[1.1, 1.9] brackets no integer at all, so generation must fail."""
    spec = {"type": "integer", "minimum": 1.1, "maximum": 1.9}
    with pytest.raises(stix2generator.exceptions.ObjectGenerationError):
        object_generator.generate_from_spec(spec)
def test_integer_very_large(object_generator, num_trials):
    """Arbitrary-precision bounds (10**9999 .. 10**10000) must be honored exactly."""
    lower, upper = 10 ** 9999, 10 ** 10000
    for _ in range(num_trials):
        spec = {"type": "integer", "minimum": lower, "maximum": upper}
        assert lower <= object_generator.generate_from_spec(spec) <= upper
def test_integer_inverted_bounds(object_generator):
    """minimum > maximum describes an empty interval, so generation must fail."""
    spec = {"type": "integer", "minimum": 1, "maximum": -1}
    with pytest.raises(stix2generator.exceptions.ObjectGenerationError):
        object_generator.generate_from_spec(spec)
| 26.195122
| 72
| 0.597455
| 337
| 3,222
| 5.397626
| 0.145401
| 0.181418
| 0.084662
| 0.163277
| 0.884552
| 0.828477
| 0.828477
| 0.808686
| 0.793293
| 0.744915
| 0
| 0.029528
| 0.295779
| 3,222
| 122
| 73
| 26.409836
| 0.772146
| 0
| 0
| 0.719101
| 1
| 0
| 0.10211
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 1
| 0.123596
| false
| 0
| 0.022472
| 0
| 0.146067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0c83fa06806fa2b9503719bfae29f77528aea4c8
| 46,914
|
py
|
Python
|
chengyubert/models/modeling_affection.py
|
VisualJoyce/ChengyuBERT
|
605db3a4b3241dd4d02baa41a68bf23b5b00b36d
|
[
"MIT"
] | 8
|
2020-12-11T13:06:16.000Z
|
2022-03-01T13:47:51.000Z
|
chengyubert/models/modeling_affection.py
|
VisualJoyce/ChengyuBERT
|
605db3a4b3241dd4d02baa41a68bf23b5b00b36d
|
[
"MIT"
] | 18
|
2020-12-31T07:32:55.000Z
|
2022-02-07T08:33:30.000Z
|
chengyubert/models/modeling_affection.py
|
VisualJoyce/ChengyuBERT
|
605db3a4b3241dd4d02baa41a68bf23b5b00b36d
|
[
"MIT"
] | 3
|
2021-03-25T01:08:56.000Z
|
2022-03-22T09:05:57.000Z
|
from typing import Any, Tuple, Optional
import torch
from torch import nn
from transformers import BertModel, BertPreTrainedModel
from chengyubert.models import register_model
from chengyubert.modules.attention import ContrastiveCoAttention
from chengyubert.modules.utils import WeightNormClassifier, LatentComposition, sequence_mask
from chengyubert.optim.loss import FocalLoss
class CaloClassifier(nn.Module):
    """Prediction heads for the CALO task: 21-way fine emotion + 4-way sentiment."""

    def __init__(self, hidden_size, hidden_dropout_prob):
        super().__init__()
        # Fine-grained emotion predictor (21 classes)
        self.fine_emotion_classifier = WeightNormClassifier(hidden_size,
                                                            21,
                                                            hidden_size,
                                                            hidden_dropout_prob)
        # Sentiment predictor (4 classes)
        self.sentiment_classifier = WeightNormClassifier(hidden_size,
                                                         4,
                                                         hidden_size,
                                                         hidden_dropout_prob)

    def forward(self, emotion_state) -> Tuple[Any, Any]:
        """Return (fine_emotion_logits, sentiment_logits) for the pooled state."""
        # affection prediction
        fine_emotion_logits = self.fine_emotion_classifier(emotion_state)
        # coarse_emotion_logits = self.coarse_emotion_classifier(emotion_state)
        sentiment_logits = self.sentiment_classifier(emotion_state)
        return fine_emotion_logits, sentiment_logits
class SlideClassifier(nn.Module):
    """Prediction head for the SLIDE task: a single 3-way sentiment classifier."""

    def __init__(self, hidden_size, hidden_dropout_prob):
        super().__init__()
        # Sentiment predictor (3 classes)
        self.sentiment_classifier = WeightNormClassifier(hidden_size,
                                                         3,
                                                         hidden_size,
                                                         hidden_dropout_prob)

    def forward(self, emotion_state) -> Any:
        """Return sentiment logits for the pooled state.

        (Annotation corrected: the method returns logits, not None.)
        """
        # slide prediction
        return self.sentiment_classifier(emotion_state)
# Map project name (opts.project) -> classification-head class.
classifiers = {
    'calo': CaloClassifier,
    'slide': SlideClassifier
}
class CaloLoss(nn.Module):
    """Loss for the CALO task: fine-grained emotion (21-way) + sentiment (4-way).

    Both component losses use reduction='none', so ``forward`` returns
    per-example losses and the caller decides how to aggregate them.
    """

    def __init__(self, use_focal, weights):
        super().__init__()
        fine_emotion_weights, sentiment_weights = weights
        if use_focal:
            # Bug fix: the original assigned a single ``self.loss_fct`` here,
            # but ``forward`` unconditionally reads ``fine_emotion_loss_fct``
            # and ``sentiment_loss_fct`` — so use_focal=True always raised
            # AttributeError. Assign a focal loss per head instead (this also
            # mirrors SlideLoss, which uses the attribute it assigns).
            self.fine_emotion_loss_fct = FocalLoss()
            self.sentiment_loss_fct = FocalLoss()
        else:
            self.fine_emotion_loss_fct = nn.CrossEntropyLoss(weight=fine_emotion_weights, reduction='none')
            self.sentiment_loss_fct = nn.CrossEntropyLoss(weight=sentiment_weights, reduction='none')

    def forward(self, logits, targets) -> Tuple[Optional[Any], Tuple[Any, Any]]:
        """Compute per-example losses.

        Args:
            logits: pair (over_logits or None, (fine_emotion_logits, sentiment_logits)).
            targets: tensor whose columns are label ids; column 0 is the
                idiom-vocabulary target, column 2 the fine emotion, column 3
                the sentiment (column layout as consumed below).

        Returns:
            (over_loss or None, (fine_emotion_loss, sentiment_loss)),
            each with one entry per example.
        """
        over_logits, (fine_emotion_logits, sentiment_logits) = logits
        if over_logits is not None:
            loss_fct = nn.CrossEntropyLoss(reduction='none')
            over_loss = loss_fct(over_logits, targets[:, 0])
        else:
            over_loss = None
        fine_emotion_loss = self.fine_emotion_loss_fct(fine_emotion_logits, targets[:, 2])
        sentiment_loss = self.sentiment_loss_fct(sentiment_logits, targets[:, 3])
        return over_loss, (fine_emotion_loss, sentiment_loss)
class SlideLoss(nn.Module):
    """Loss for the SLIDE task: a single 3-way sentiment classification.

    The sentiment loss uses reduction='none', so callers receive one loss
    value per example.
    """

    def __init__(self, use_focal, weights):
        super().__init__()
        if use_focal:
            self.loss_fct = FocalLoss()
        else:
            self.loss_fct = nn.CrossEntropyLoss(weight=weights, reduction='none')

    def forward(self, logits, targets) -> Tuple[Any, Any]:
        """Return (per-example over-vocabulary loss or None, per-example sentiment loss).

        ``logits`` is a pair (over_logits or None, sentiment_logits); targets
        column 0 holds the over-vocabulary label, column 1 the sentiment label.
        """
        over_logits, sentiment_logits = logits
        over_loss = None
        if over_logits is not None:
            over_loss = nn.CrossEntropyLoss(reduction='none')(over_logits, targets[:, 0])
        sentiment_emotion_loss = self.loss_fct(sentiment_logits, targets[:, 1])
        return over_loss, sentiment_emotion_loss
# Map project name (opts.project) -> loss-module class.
loss_calculators = {
    'calo': CaloLoss,
    'slide': SlideLoss
}
@register_model('affection-max-pooling')
class AffectionMaxPooling(BertPreTrainedModel):
    """Affection classifier: BERT encoding + max-pooling over the idiom span.

    The idiom token states (selected via gather_index) are max-pooled into a
    single vector, projected through a tanh linear layer, and classified with
    the project-specific head selected by ``opts.project`` ('calo' or 'slide').
    """

    def __init__(self, config, opts):
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # classification head and loss both depend on the target project
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()

    def forward(self, input_ids, token_type_ids, attention_mask, positions, option_ids, gather_index,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return (None, None, None, losses) when compute_loss, else (None, None, None, logits).

        positions, option_ids, inputs_embeds and options_embeds are accepted
        for interface compatibility with sibling models but are unused here.
        """
        # n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids,
                                    token_type_ids=token_type_ids,
                                    attention_mask=attention_mask)
        encoded_context = encoded_outputs[0]
        # idiom_length = (gather_index > 0).sum(1)
        # expand gather_index across hidden_size so full state vectors are gathered
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        # max-pool over the idiom tokens
        composed_states, _ = idiom_states.max(dim=1)
        emotion_state = self.channel1_linear(composed_states).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            _, losses = self.loss_fct([None, logits], targets)
            return None, None, None, losses
        else:
            return None, None, None, logits
@register_model('affection-max-pooling-masked')
class AffectionMaxPoolingMasked(BertPreTrainedModel):
    """Two-channel affection classifier: plain and idiom-masked BERT views.

    ``input_ids`` stacks two views of each example along dim 0 (the forward
    pass indexes view 0 as the original text and view 1 as the masked one).
    Each view's idiom span is max-pooled, projected, and the two channels are
    concatenated and fused before classification.
    """

    def __init__(self, config, opts):
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # fuses the concatenated two channels back down to hidden_size
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()

    def forward(self, input_ids, token_type_ids, attention_mask, positions, option_ids, gather_index,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return (None, None, None, losses) when compute_loss, else (None, None, None, logits)."""
        n, batch_size, seq_len = input_ids.size()
        # run both views through BERT as one flat batch, then split them again
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        # gather_index is a pair: indices for the plain and the masked view
        gather_index, gather_index_masked = gather_index
        idiom_length = (gather_index > 0).sum(1)  # NOTE(review): unused
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                 self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # max-pool each channel over its idiom tokens
        composed_states, _ = idiom_states.max(dim=1)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(composed_states_masked).tanh()
        # fuse the two channels into a single emotion state
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            _, losses = self.loss_fct([None, logits], targets)
            return None, None, None, losses
        else:
            return None, None, None, logits
@register_model('affection-max-pooling-masked-latent-idiom')
class AffectionMaxPoolingMaskedLatentIdiom(BertPreTrainedModel):
    """Masked two-channel classifier with a latent idiom-vocabulary attention.

    On top of the plain/masked channels, the masked representation is matched
    against a learned idiom embedding table: the attention-weighted idiom
    embedding is concatenated into channel 2, and the match logits are
    returned (and supervised) as ``over_logits``.
    """

    def __init__(self, config, opts):
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # restrict the idiom vocabulary to a candidate subset when given,
        # otherwise use every id in the idiom vocabulary
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # channel 2 consumes the masked state concatenated with the idiom-attention state
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()

    def vocab(self, blank_states):
        """Score blank_states against the candidate idiom embeddings.

        Returns:
            logits: similarity of each state to every candidate, shape (b, n_candidates).
            state: softmax-weighted mixture of candidate embeddings, shape (b, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, n_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state

    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return (None, over_loss, None, losses) when compute_loss, else (None, over_logits, None, logits)."""
        n, batch_size, seq_len = input_ids.size()
        # run both views through BERT as one flat batch, then split them again
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        # gather_index is a pair: indices for the plain and the masked view
        gather_index, gather_index_masked = gather_index
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                 self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # max-pool each channel over its idiom tokens
        composed_states, _ = idiom_states.max(dim=1)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        # match the masked representation against the idiom vocabulary
        over_logits, idiom_attn_state = self.vocab(composed_states_masked)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(torch.cat([composed_states_masked, idiom_attn_state], dim=-1)).tanh()
        # fuse the two channels into a single emotion state
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, None, losses
        else:
            return None, over_logits, None, logits
@register_model('affection-max-pooling-masked-latent-idiom-with-gate')
class AffectionMaxPoolingMaskedLatentIdiomWithGate(BertPreTrainedModel):
    """Gated variant of the max-pooling masked latent-idiom classifier.

    Same two channels as AffectionMaxPoolingMaskedLatentIdiom, but instead of
    concatenating them, a learned per-dimension gate ``g`` mixes channel 1
    and channel 2 before the final projection.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        # Gated fusion keeps the hidden size, so compose_linear is square here.
        self.compose_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # Learnable gate weights, initialised uniformly to 1/hidden_size.
        self.register_parameter(name='g',
                                param=torch.nn.Parameter(torch.ones(config.hidden_size) / config.hidden_size))
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n parallel views (view 0 literal, view 1
        masked — presumably n == 2; TODO confirm). ``gather_index`` is a
        pair of (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Expand indices over the hidden dim so gather() selects full vectors.
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                  self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # Max-pool over the span dimension.
        composed_states, _ = idiom_states.max(dim=1)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        over_logits, idiom_attn_state = self.vocab(composed_states_masked)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(torch.cat([composed_states_masked, idiom_attn_state], dim=-1)).tanh()
        # Gate is a function of channel1 only; mixes the two channels
        # element-wise.
        gate = torch.sigmoid(self.g * channel1)
        s = gate * channel1 + (1 - gate) * channel2
        # affection prediction
        emotion_state = self.compose_linear(s).tanh()
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, None, losses
        else:
            return None, over_logits, None, logits
@register_model('affection-compose')
class AffectionCompose(BertPreTrainedModel):
    """Affection classifier using latent composition of the idiom span.

    Pipeline: BERT-encode the context, gather the idiom token states via
    ``gather_index``, compose them with ``LatentComposition``, project with a
    linear + tanh layer, and classify.
    """
    def __init__(self, config, opts):
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.idiom_compose = LatentComposition(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def forward(self, input_ids, token_type_ids, attention_mask, positions, option_ids, gather_index,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Run the model; returns ``(None, None, select_masks, losses-or-logits)``."""
        sequence_output = self.bert(input_ids,
                                    token_type_ids=token_type_ids,
                                    attention_mask=attention_mask)[0]
        # Number of real (non-padding) idiom tokens per example.
        span_lengths = (gather_index > 0).sum(1)
        # Expand the 2-D index over the hidden dimension so gather() selects
        # full hidden vectors; type_as keeps the index in long dtype.
        expanded_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        span_states = torch.gather(sequence_output, dim=1, index=expanded_index)
        composed, _, select_masks = self.idiom_compose(span_states, span_lengths)
        emotion_state = self.channel1_linear(composed).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if not compute_loss:
            return None, None, select_masks, logits
        _, losses = self.loss_fct([None, logits], targets)
        return None, None, select_masks, losses
@register_model('affection-compose-masked')
class AffectionComposeMasked(BertPreTrainedModel):
    """Two-channel affection classifier.

    Channel 1: latent-composed idiom span from the literal context.
    Channel 2: max-pooled idiom span from the masked context view.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.idiom_compose = LatentComposition(config.hidden_size)
        # LatentComposition returns a 2*hidden vector, hence the wider input.
        self.channel1_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def forward(self, input_ids, token_type_ids, attention_mask, positions, option_ids, gather_index,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, None, select_masks, losses-or-logits)``.

        ``input_ids`` is 3-D: n views of the batch (view 0 literal, view 1
        masked — presumably n == 2; TODO confirm). ``gather_index`` is a pair
        of (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Count of real (non-padding) idiom tokens per example.
        idiom_length = (gather_index > 0).sum(1)
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        gather_index_masked = gather_index_masked.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked)
        composed_states, _, select_masks = self.idiom_compose(idiom_states, idiom_length)
        # Masked channel is max-pooled rather than latent-composed.
        # composed_states_masked, _, select_masks_masked = self.idiom_compose(idiom_states_masked, idiom_length)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(composed_states_masked).tanh()
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            _, losses = self.loss_fct([None, logits], targets)
            return None, None, select_masks, losses
        else:
            return None, None, select_masks, logits
@register_model('affection-compose-masked-latent-idiom')
class AffectionComposeMaskedLatentIdiom(BertPreTrainedModel):
    """Two-channel affection classifier with latent idiom lookup.

    Channel 1: latent-composed idiom span from the literal context.
    Channel 2: max-pooled masked span concatenated with an attention-weighted
    ("latent") idiom embedding from the candidate vocabulary (see ``vocab``).
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.idiom_compose = LatentComposition(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, select_masks, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Count of real (non-padding) idiom tokens per example.
        idiom_length = (gather_index > 0).sum(1)
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        gather_index_masked = gather_index_masked.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked)
        # idiom_states = encoded_context[[i for i in range(len(positions))], positions]  # [batch, hidden_state]
        composed_states, _, select_masks = self.idiom_compose(idiom_states, idiom_length)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        over_logits, idiom_attn_state = self.vocab(composed_states_masked)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(torch.cat([composed_states_masked, idiom_attn_state], dim=-1)).tanh()
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, select_masks, losses
        else:
            return None, over_logits, select_masks, logits
@register_model('affection-compose-masked-latent-idiom-with-gate')
class AffectionComposeMaskedLatentIdiomWithGate(BertPreTrainedModel):
    """Gated variant of AffectionComposeMaskedLatentIdiom.

    A learned per-dimension gate ``g`` mixes channel 1 (latent-composed
    literal span) with channel 2 (masked span + latent idiom embedding)
    instead of concatenating them.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        # NOTE(review): the four attributes below are never read in this
        # class — possibly copy-paste leftovers from an RNN variant; confirm
        # before removing.
        self.use_leaf_rnn = True
        self.intra_attention = False
        self.gumbel_temperature = 1
        self.bidirectional = True
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.idiom_compose = LatentComposition(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        # Gated fusion keeps the hidden size, so compose_linear is square.
        self.compose_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # Learnable gate weights, initialised uniformly to 1/hidden_size.
        self.register_parameter(name='g',
                                param=torch.nn.Parameter(torch.ones(config.hidden_size) / config.hidden_size))
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, select_masks, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Count of real (non-padding) idiom tokens per example.
        idiom_length = (gather_index > 0).sum(1)
        gather_index = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index)
        gather_index_masked = gather_index_masked.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked)
        # idiom_states = encoded_context[[i for i in range(len(positions))], positions]  # [batch, hidden_state]
        composed_states, _, select_masks = self.idiom_compose(idiom_states, idiom_length)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        over_logits, idiom_attn_state = self.vocab(composed_states_masked)
        channel1 = self.channel1_linear(composed_states).tanh()
        channel2 = self.channel2_linear(torch.cat([composed_states_masked, idiom_attn_state], dim=-1)).tanh()
        # Gate is a function of channel1 only; mixes channels element-wise.
        gate = torch.sigmoid(self.g * channel1)
        s = gate * channel1 + (1 - gate) * channel2
        # affection prediction
        emotion_state = self.compose_linear(s).tanh()
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, select_masks, losses
        else:
            return None, over_logits, select_masks, logits
@register_model('affection-coattention-masked')
class AffectionCoAttentionMasked(BertPreTrainedModel):
    """Affection classifier coupling literal and masked idiom spans with
    contrastive co-attention.

    The idiom span states from the literal context (L) and from the masked
    context (I) are fed to ``ContrastiveCoAttention``; the two attended
    summaries form the two channels.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # NOTE(review): enlarged_candidates is registered but never used in
        # this class (no idiom_embedding/vocab here) — confirm intent.
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.coattention = ContrastiveCoAttention(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, None, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Expand indices over the hidden dim so gather() selects full vectors.
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                  self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # composed_states_masked, _ = idiom_states_masked.max(dim=1)
        # L = literal span states, I = masked span states.
        L = idiom_states
        I = idiom_states_masked
        # gather_index is still 2-D here (only the *_unsqueezed copy grew).
        idiom_length = (gather_index > 0).sum(1)
        idiom_mask = sequence_mask(idiom_length)
        C_L, C_I = self.coattention(L, I, idiom_mask, idiom_mask)
        channel1 = self.channel1_linear(C_L).tanh()
        channel2 = self.channel2_linear(C_I).tanh()
        # Fuse the two co-attended summaries.
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            _, losses = self.loss_fct([None, logits], targets)
            return None, None, None, losses
        else:
            return None, None, None, logits
@register_model('affection-coattention-masked-latent-idiom')
class AffectionCoAttentionMaskedLatentIdiom(BertPreTrainedModel):
    """Co-attention affection classifier with latent idiom lookup.

    Like AffectionCoAttentionMasked, but the masked-channel summary C_I is
    additionally scored against the candidate idiom vocabulary and enriched
    with the attention-weighted idiom embedding (see ``vocab``).
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.coattention = ContrastiveCoAttention(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Expand indices over the hidden dim so gather() selects full vectors.
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                  self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # composed_states_masked, _ = idiom_states_masked.max(dim=1)
        # L = literal span states, I = masked span states.
        L = idiom_states
        I = idiom_states_masked
        # gather_index is still 2-D here (only the *_unsqueezed copy grew).
        idiom_length = (gather_index > 0).sum(1)
        idiom_mask = sequence_mask(idiom_length)
        C_L, C_I = self.coattention(L, I, idiom_mask, idiom_mask)
        over_logits, idiom_attn_state = self.vocab(C_I)
        channel1 = self.channel1_linear(C_L).tanh()
        channel2 = self.channel2_linear(torch.cat([C_I, idiom_attn_state], dim=-1)).tanh()
        # Fuse the two channels.
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, None, losses
        else:
            return None, over_logits, None, logits
@register_model('affection-coattention-masked-latent-idiom-with-gate')
class AffectionCoAttentionMaskedLatentIdiomWithGate(BertPreTrainedModel):
    """Gated variant of AffectionCoAttentionMaskedLatentIdiom.

    A learned per-dimension gate ``g`` mixes the co-attended literal channel
    with the masked + latent-idiom channel instead of concatenating them.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        self.coattention = ContrastiveCoAttention(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        # Gated fusion keeps the hidden size, so compose_linear is square.
        self.compose_linear = nn.Linear(config.hidden_size, config.hidden_size)
        # Learnable gate weights, initialised uniformly to 1/hidden_size.
        self.register_parameter(name='g',
                                param=torch.nn.Parameter(torch.ones(config.hidden_size) / config.hidden_size))
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        # Expand indices over the hidden dim so gather() selects full vectors.
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        gather_index_masked_unsqueezed = gather_index_masked.unsqueeze(-1).expand(-1, -1,
                                                                                  self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked_unsqueezed)
        # composed_states_masked, _ = idiom_states_masked.max(dim=1)
        # L = literal span states, I = masked span states.
        L = idiom_states
        I = idiom_states_masked
        # gather_index is still 2-D here (only the *_unsqueezed copy grew).
        idiom_length = (gather_index > 0).sum(1)
        idiom_mask = sequence_mask(idiom_length)
        C_L, C_I = self.coattention(L, I, idiom_mask, idiom_mask)
        over_logits, idiom_attn_state = self.vocab(C_I)
        channel1 = self.channel1_linear(C_L).tanh()
        channel2 = self.channel2_linear(torch.cat([C_I, idiom_attn_state], dim=-1)).tanh()
        # Gate is a function of channel1 only; mixes channels element-wise.
        gate = torch.sigmoid(self.g * channel1)
        s = gate * channel1 + (1 - gate) * channel2
        # affection prediction
        emotion_state = self.compose_linear(s).tanh()
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, None, losses
        else:
            return None, over_logits, None, logits
@register_model('affection-coattention-masked-full')
class AffectionCoAttentionMaskedFull(BertPreTrainedModel):
    """Co-attention affection classifier using the FULL masked sequence.

    Unlike AffectionCoAttentionMasked, the second co-attention input is the
    whole masked context (all tokens), not just the gathered idiom span.
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # NOTE(review): enlarged_candidates is registered but never used in
        # this class (no idiom_embedding/vocab here) — confirm intent.
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.coattention = ContrastiveCoAttention(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, None, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors; only the first is used here.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        # L: literal idiom span, masked by the span positions' attention bits;
        # I: the entire masked sequence with its own attention mask.
        L = idiom_states
        mask_L = torch.gather(attention_mask[0], dim=1, index=gather_index)
        I = encoded_context_masked
        mask_I = attention_mask[1]
        C_L, C_I = self.coattention(L, I, mask_L, mask_I)
        channel1 = self.channel1_linear(C_L).tanh()
        channel2 = self.channel2_linear(C_I).tanh()
        # Fuse the two co-attended summaries.
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            _, losses = self.loss_fct([None, logits], targets)
            return None, None, None, losses
        else:
            return None, None, None, logits
@register_model('affection-coattention-masked-full-latent-idiom')
class AffectionCoAttentionMaskedFullLatentIdiom(BertPreTrainedModel):
    """Full-sequence co-attention classifier with latent idiom lookup.

    Combines AffectionCoAttentionMaskedFull (co-attention between the literal
    idiom span and the whole masked sequence) with the latent idiom embedding
    channel of the *-latent-idiom variants (see ``vocab``).
    """
    def __init__(self, config, opts):
        # config: BertConfig; opts: run options (argparse-style namespace).
        super().__init__(config)
        self.project = opts.project
        self.model_name = opts.model
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # Candidate idiom ids scored by vocab().
        if opts.enlarged_candidates is not None:
            self.register_buffer('enlarged_candidates', torch.tensor(opts.enlarged_candidates, dtype=torch.long))
        else:
            self.register_buffer('enlarged_candidates', torch.arange(opts.len_idiom_vocab))
        print(self.enlarged_candidates.size())
        self.idiom_embedding = nn.Embedding(opts.len_idiom_vocab, config.hidden_size)
        # self.idiom_compose = LatentComposition(config.hidden_size)
        self.coattention = ContrastiveCoAttention(config.hidden_size)
        self.channel1_linear = nn.Linear(config.hidden_size, config.hidden_size)
        self.channel2_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.compose_linear = nn.Linear(config.hidden_size * 2, config.hidden_size)
        self.classifier = classifiers[self.project](config.hidden_size, config.hidden_dropout_prob)
        self.loss_fct = loss_calculators[self.project](opts.use_focal, opts.weights)
        self.init_weights()
    def vocab(self, blank_states):
        """Score candidate idioms and build an attention-weighted embedding.

        blank_states: (batch, hidden). Returns ``(logits, state)`` with
        logits (batch, num_candidates) and state (batch, hidden).
        """
        idiom_embeddings = self.idiom_embedding(self.enlarged_candidates)
        logits = torch.einsum('bd,nd->bn', [blank_states, idiom_embeddings])  # (b, num_candidates)
        state = torch.einsum('bn,nd->bd', [logits.softmax(dim=-1), idiom_embeddings])  # (b, hidden)
        return logits, state
    def forward(self, input_ids, token_type_ids, attention_mask, positions, gather_index, option_ids=None,
                inputs_embeds=None, options_embeds=None, compute_loss=False, targets=None):
        """Return ``(None, over_*, None, losses-or-logits)``.

        ``input_ids`` is 3-D: n views (view 0 literal, view 1 masked —
        presumably n == 2; TODO confirm). ``gather_index`` is a pair of
        (batch, span_len) index tensors.
        """
        n, batch_size, seq_len = input_ids.size()
        encoded_outputs = self.bert(input_ids.view(n * batch_size, seq_len),
                                    token_type_ids=token_type_ids.view(n * batch_size, seq_len),
                                    attention_mask=attention_mask.view(n * batch_size, seq_len))
        encoded_context = encoded_outputs[0].view(n, batch_size, seq_len, -1)[0]
        encoded_context_masked = encoded_outputs[0].view(n, batch_size, seq_len, -1)[1]
        gather_index, gather_index_masked = gather_index
        gather_index_unsqueezed = gather_index.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(input_ids)
        idiom_states = torch.gather(encoded_context, dim=1, index=gather_index_unsqueezed)
        gather_index_masked = gather_index_masked.unsqueeze(-1).expand(-1, -1, self.config.hidden_size).type_as(
            input_ids)
        idiom_states_masked = torch.gather(encoded_context_masked, dim=1, index=gather_index_masked)
        # idiom_states = encoded_context[[i for i in range(len(positions))], positions]  # [batch, hidden_state]
        # composed_states, _, select_masks = self.idiom_compose(idiom_states, idiom_length)
        composed_states_masked, _ = idiom_states_masked.max(dim=1)
        # L: literal idiom span, masked by the span positions' attention bits;
        # I: the entire masked sequence with its own attention mask.
        L = idiom_states
        mask_L = torch.gather(attention_mask[0], dim=1, index=gather_index)
        I = encoded_context_masked
        mask_I = attention_mask[1]
        C_L, C_I = self.coattention(L, I, mask_L, mask_I)
        over_logits, idiom_attn_state = self.vocab(C_I)
        channel1 = self.channel1_linear(C_L).tanh()
        channel2 = self.channel2_linear(torch.cat([C_I, idiom_attn_state], dim=-1)).tanh()
        # Fuse the two channels.
        emotion_state = self.compose_linear(torch.cat([channel1, channel2], dim=-1)).tanh()
        # affection prediction
        logits = self.classifier(emotion_state)
        if compute_loss:
            over_loss, losses = self.loss_fct([over_logits, logits], targets)
            return None, over_loss, None, losses
        else:
            return None, over_logits, None, logits
| 46.727092
| 119
| 0.672997
| 5,774
| 46,914
| 5.157949
| 0.031694
| 0.062454
| 0.069304
| 0.029682
| 0.945806
| 0.931469
| 0.926029
| 0.925425
| 0.925156
| 0.918374
| 0
| 0.011519
| 0.226521
| 46,914
| 1,003
| 120
| 46.773679
| 0.809215
| 0.03468
| 0
| 0.896159
| 0
| 0
| 0.021534
| 0.009905
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058321
| false
| 0
| 0.01138
| 0.001422
| 0.146515
| 0.012802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0cb3bd082fb4ece4bd9ba9098318926ae68b69bc
| 1,770
|
py
|
Python
|
tests/integration/blueprints/admin/ticketing/test_views.py
|
homeworkprod/byceps
|
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
|
[
"BSD-3-Clause"
] | 23
|
2015-08-03T23:28:54.000Z
|
2018-12-12T20:11:45.000Z
|
tests/integration/blueprints/admin/ticketing/test_views.py
|
homeworkprod/byceps
|
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
|
[
"BSD-3-Clause"
] | 1
|
2018-09-30T18:18:24.000Z
|
2018-09-30T18:18:24.000Z
|
tests/integration/blueprints/admin/ticketing/test_views.py
|
homeworkprod/byceps
|
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
|
[
"BSD-3-Clause"
] | 9
|
2015-08-06T16:41:36.000Z
|
2018-09-25T11:17:31.000Z
|
"""
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
def test_ticket_index(party, ticketing_admin_client, ticket):
    """The per-party ticket list should be served to a ticketing admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/tickets/for_party/{party.id}'
    )
    assert response.status_code == 200
def test_ticket_view(ticketing_admin_client, ticket):
    """The detail page of a single ticket should be served to an admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/tickets/{ticket.id}'
    )
    assert response.status_code == 200
def test_appoint_user_form(ticketing_admin_client, ticket):
    """The form to appoint a user to a ticket should render for an admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/tickets/{ticket.id}/appoint_user'
    )
    assert response.status_code == 200
def test_bundle_index(party, ticketing_admin_client, bundle):
    """The per-party bundle list should be served to a ticketing admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/bundles/for_party/{party.id}'
    )
    assert response.status_code == 200
def test_bundle_view(ticketing_admin_client, bundle):
    """The detail page of a single bundle should be served to an admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/bundles/{bundle.id}'
    )
    assert response.status_code == 200
def test_category_index(ticketing_admin_client, party):
    """The per-party category list should be served to a ticketing admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/categories/for_party/{party.id}'
    )
    assert response.status_code == 200
def test_category_create_form(ticketing_admin_client, party):
    """The category creation form should render for a ticketing admin."""
    response = ticketing_admin_client.get(
        f'/admin/ticketing/categories/for_party/{party.id}/create'
    )
    assert response.status_code == 200
def test_category_update_form(ticketing_admin_client, category):
    """The category update form should render for a ticketing admin."""
    # NOTE(review): the path repeats the `categories` segment; presumably this
    # mirrors the blueprint's URL rule — confirm against the route definition.
    response = ticketing_admin_client.get(
        f'/admin/ticketing/categories/categories/{category.id}/update'
    )
    assert response.status_code == 200
| 33.396226
| 72
| 0.759887
| 239
| 1,770
| 5.359833
| 0.167364
| 0.174863
| 0.249805
| 0.112412
| 0.827479
| 0.790008
| 0.790008
| 0.790008
| 0.790008
| 0.67057
| 0
| 0.020806
| 0.131073
| 1,770
| 52
| 73
| 34.038462
| 0.812094
| 0.054802
| 0
| 0.5
| 0
| 0
| 0.224024
| 0.224024
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b2bfbcc5a8cf3566a46d1acc491f29950741675
| 54
|
py
|
Python
|
py65816/utils/db_console.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | 1
|
2022-02-22T18:04:26.000Z
|
2022-02-22T18:04:26.000Z
|
py65816/utils/db_console.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | null | null | null |
py65816/utils/db_console.py
|
tmr4/py65816
|
00d9a378ebd0e27378c8ce9e6611a7fec0020b44
|
[
"BSD-3-Clause"
] | null | null | null |
import msvcrt
def kbhit():
    """Report whether a keypress is waiting in the console input buffer.

    Thin wrapper around the Windows-only ``msvcrt.kbhit``.
    """
    key_waiting = msvcrt.kbhit()
    return key_waiting
| 10.8
| 25
| 0.685185
| 7
| 54
| 5.285714
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203704
| 54
| 4
| 26
| 13.5
| 0.860465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
0b692a7c34e0999838eb880d965d783a35666f6b
| 10,667
|
py
|
Python
|
test/federated_aggregator/test_clip_norm_aggregators.py
|
SSSuperTIan/Sherpa.ai-Federated-Learning-Framework
|
a30d73a018526f1033ee0ec57489c4c6e2f15b0a
|
[
"Apache-2.0"
] | 1
|
2021-03-18T07:31:36.000Z
|
2021-03-18T07:31:36.000Z
|
test/federated_aggregator/test_clip_norm_aggregators.py
|
SSSuperTIan/Sherpa.ai-Federated-Learning-Framework
|
a30d73a018526f1033ee0ec57489c4c6e2f15b0a
|
[
"Apache-2.0"
] | null | null | null |
test/federated_aggregator/test_clip_norm_aggregators.py
|
SSSuperTIan/Sherpa.ai-Federated-Learning-Framework
|
a30d73a018526f1033ee0ec57489c4c6e2f15b0a
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from shfl.federated_aggregator import NormClipAggregator
from shfl.federated_aggregator import CDPAggregator
from shfl.federated_aggregator import WeakDPAggregator
def test_aggregated_weights_NormClip():
    """With a large clip bound, aggregation equals the plain per-layer mean."""
    n_clients = 10
    n_layers = 5
    shapes = [(128, 64), (64, 64), (64, 64), (64, 32), (32, 10)]
    client_weights = [
        [np.random.rand(rows, cols) for rows, cols in shapes]
        for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    aggregated = aggregator.aggregate_weights(client_weights)
    stacked = np.array(client_weights)
    expected = np.array(
        [np.mean(stacked[:, layer], axis=0) for layer in range(n_layers)]
    )
    for layer in range(n_layers):
        assert np.array_equal(expected[layer], aggregated[layer])
    assert len(aggregated) == n_layers
def test_aggregated_weights_WeakDP():
    """Weak-DP aggregation equals the per-layer mean plus seeded Gaussian noise."""
    n_clients = 10
    n_layers = 5
    shapes = [(128, 64), (64, 64), (64, 64), (64, 32), (32, 10)]
    clip_value = 100
    np.random.seed(0)
    client_weights = [
        [np.random.rand(rows, cols) for rows, cols in shapes]
        for _ in range(n_clients)
    ]
    np.random.seed(0)
    aggregator = WeakDPAggregator(clip=clip_value)
    aggregated = aggregator.aggregate_weights(client_weights)
    np.random.seed(0)  # replay the aggregator's noise draws
    stacked = np.array(client_weights)
    serialized = np.array([aggregator._serialize(params) for params in stacked])
    # Round-trip through the aggregator's (de)serialization, as the
    # aggregator itself does internally.
    stacked = np.array([aggregator._deserialize(params) for params in serialized])
    expected = np.array(
        [np.mean(stacked[:, layer], axis=0) for layer in range(n_layers)]
    )
    for idx, layer_mean in enumerate(expected):
        gaussian = np.random.normal(
            loc=0.0, scale=0.025 * clip_value / n_clients, size=layer_mean.shape
        )
        expected[idx] = layer_mean + gaussian
    for layer in range(n_layers):
        assert np.array_equal(expected[layer], aggregated[layer])
    assert len(aggregated) == n_layers
def test_aggregated_weights_multidimensional_2D_array_NormClip():
    """2-D client params: clipped aggregation equals the element-wise mean."""
    n_clients = 10
    n_rows, n_cols = 3, 9
    params = np.array(
        [np.random.rand(n_rows, n_cols) for _ in range(n_clients)]
    )
    aggregator = NormClipAggregator(clip=10)
    aggregated = aggregator.aggregate_weights(params)
    expected = np.zeros((n_rows, n_cols))
    for client_param in params:
        expected += client_param
    expected = expected / n_clients
    assert np.array_equal(expected, aggregated)
    assert aggregated.shape == expected.shape
def test_aggregated_weights_multidimensional_2D_array_WeakDP():
    """2-D client params: weak-DP result is the mean plus one seeded noise draw."""
    n_clients = 10
    n_rows, n_cols = 3, 9
    clip_value = 100
    params = np.array(
        [np.random.rand(n_rows, n_cols) for _ in range(n_clients)]
    )
    np.random.seed(0)
    aggregator = WeakDPAggregator(clip=clip_value)
    aggregated = aggregator.aggregate_weights(params)
    np.random.seed(0)  # replay the aggregator's noise draw
    expected = np.zeros((n_rows, n_cols))
    for client_param in params:
        expected += client_param
    expected = expected / n_clients
    expected += np.random.normal(
        loc=0.0, scale=0.025 * clip_value / n_clients, size=expected.shape
    )
    assert np.array_equal(expected, aggregated)
    assert len(aggregated) == expected.shape[0]
def test_aggregated_weights_multidimensional_3D_array_NormClip():
    """3-D client params: clipped aggregation equals the element-wise mean."""
    n_clients = 10
    n_rows, n_cols, n_k = 3, 9, 5
    params = np.array(
        [np.random.rand(n_rows, n_cols, n_k) for _ in range(n_clients)]
    )
    aggregator = NormClipAggregator(clip=10)
    aggregated = aggregator.aggregate_weights(params)
    expected = np.zeros((n_rows, n_cols, n_k))
    for client_param in params:
        expected += client_param
    expected = expected / n_clients
    assert np.array_equal(expected, aggregated)
    assert aggregated.shape == expected.shape
def test_aggregated_weights_multidimensional_3D_array_WeakDP():
    """3-D client params: weak-DP result is the mean plus one seeded noise draw."""
    n_clients = 10
    n_rows, n_cols, n_k = 3, 9, 5
    clip_value = 10
    params = np.array(
        [np.random.rand(n_rows, n_cols, n_k) for _ in range(n_clients)]
    )
    np.random.seed(0)
    aggregator = WeakDPAggregator(clip=clip_value)
    aggregated = aggregator.aggregate_weights(params)
    np.random.seed(0)  # replay the aggregator's noise draw
    expected = np.zeros((n_rows, n_cols, n_k))
    for client_param in params:
        expected += client_param
    expected = expected / n_clients
    expected += np.random.normal(
        loc=0.0, scale=0.025 * clip_value / n_clients, size=expected.shape
    )
    assert np.array_equal(expected, aggregated)
    assert len(aggregated) == expected.shape[0]
def test_aggregated_weights_list_of_arrays_NormClip():
    """Per-client lists of differently shaped arrays aggregate to per-array means."""
    n_clients = 10
    shapes = [(30, 20), (20, 30), (50, 40)]
    client_params = [
        [np.random.rand(*shape) for shape in shapes] for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    aggregated = aggregator.aggregate_weights(client_params)
    expected = [np.zeros(shape) for shape in shapes]
    for params in client_params:
        for idx, array in enumerate(params):
            expected[idx] += array
    expected = [total / n_clients for total in expected]
    for idx in range(len(shapes)):
        assert np.array_equal(expected[idx], aggregated[idx])
        assert aggregated[idx].shape == expected[idx].shape
def test_aggregated_weights_list_of_arrays_WeakDP():
    """Weak-DP over array lists: each array's mean plus one seeded noise draw."""
    n_clients = 10
    rng_seed = 1231231
    clip_value = 100
    shapes = [(30, 20), (20, 30), (50, 40)]
    client_params = [
        [np.random.rand(*shape) for shape in shapes] for _ in range(n_clients)
    ]
    np.random.seed(rng_seed)
    aggregator = WeakDPAggregator(clip=clip_value)
    aggregated = aggregator.aggregate_weights(client_params)
    np.random.seed(rng_seed)  # replay the aggregator's noise draws
    expected = [np.zeros(shape) for shape in shapes]
    for params in client_params:
        for idx, array in enumerate(params):
            expected[idx] += array
    for idx, total in enumerate(expected):
        gaussian = np.random.normal(
            loc=0.0, scale=0.025 * clip_value / n_clients, size=total.shape
        )
        expected[idx] = total / n_clients + gaussian
    for idx in range(len(shapes)):
        assert np.allclose(expected[idx], aggregated[idx])
        assert aggregated[idx].shape == expected[idx].shape
def test_serialization_deserialization_multidimensional_3D_array():
    """Serialize/deserialize of 3-D arrays must round-trip exactly."""
    n_clients = 10
    n_rows, n_cols, n_k = 3, 9, 5
    client_params = [
        np.random.rand(n_rows, n_cols, n_k) for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    serialized = np.array([aggregator._serialize(params) for params in client_params])
    restored = np.array([aggregator._deserialize(params) for params in serialized])
    assert np.array_equal(restored, client_params)
def test_serialization_deserialization_multidimensional_2D_array():
    """Serialize/deserialize of 2-D arrays must round-trip exactly."""
    n_clients = 10
    n_rows, n_cols = 3, 9
    client_params = [
        np.random.rand(n_rows, n_cols) for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    serialized = np.array([aggregator._serialize(params) for params in client_params])
    restored = np.array([aggregator._deserialize(params) for params in serialized])
    assert np.array_equal(restored, client_params)
def test_serialization_deserialization():
    """Per-client lists of 2-D layers must round-trip through (de)serialization."""
    n_clients = 10
    shapes = [(128, 64), (64, 64), (64, 64), (64, 32), (32, 10)]
    client_params = [
        [np.random.rand(rows, cols) for rows, cols in shapes]
        for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    serialized = np.array([aggregator._serialize(params) for params in client_params])
    for client_idx, payload in enumerate(serialized):
        restored = aggregator._deserialize(payload)
        for layer_idx, layer in enumerate(restored):
            assert np.array_equal(layer, client_params[client_idx][layer_idx])
def test_serialization_deserialization_list_of_arrays():
    """Lists of differently shaped arrays must round-trip through (de)serialization."""
    n_clients = 10
    shapes = [(30, 20), (20, 30), (50, 40)]
    client_params = [
        [np.random.rand(*shape) for shape in shapes] for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    serialized = np.array([aggregator._serialize(params) for params in client_params])
    for client_idx, payload in enumerate(serialized):
        restored = aggregator._deserialize(payload)
        for array_idx, array in enumerate(restored):
            assert np.array_equal(array, client_params[client_idx][array_idx])
def test_serialization_deserialization_mixed_list():
    """Mixed lists (scalar array plus 2-D arrays) must round-trip exactly."""
    n_clients = 10
    client_params = [
        [np.random.rand(), np.random.rand(20, 30), np.random.rand(50, 40)]
        for _ in range(n_clients)
    ]
    aggregator = NormClipAggregator(clip=100)
    serialized = np.array([aggregator._serialize(params) for params in client_params])
    for client_idx, payload in enumerate(serialized):
        restored = aggregator._deserialize(payload)
        for item_idx, item in enumerate(restored):
            assert np.array_equal(item, client_params[client_idx][item_idx])
| 34.521036
| 101
| 0.689135
| 1,492
| 10,667
| 4.644102
| 0.058311
| 0.125704
| 0.034637
| 0.041709
| 0.95757
| 0.913552
| 0.906191
| 0.891471
| 0.891471
| 0.891471
| 0
| 0.032014
| 0.206431
| 10,667
| 308
| 102
| 34.633117
| 0.786533
| 0
| 0
| 0.860262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091703
| 1
| 0.056769
| false
| 0
| 0.017467
| 0
| 0.074236
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f037beb5facd240b25f8c88fa806ad8e1e34753f
| 2,948
|
py
|
Python
|
tests/test_version.py
|
jyaquinas/git-changelog
|
81e04757d5b5397c9e5ef2e6ece3f92c0f07e916
|
[
"ISC"
] | 56
|
2019-03-09T07:38:08.000Z
|
2022-03-27T19:16:53.000Z
|
tests/test_version.py
|
jyaquinas/git-changelog
|
81e04757d5b5397c9e5ef2e6ece3f92c0f07e916
|
[
"ISC"
] | 30
|
2019-05-03T09:46:37.000Z
|
2022-03-31T10:07:38.000Z
|
tests/test_version.py
|
pawamoy/gitolog
|
dde6959b62a9e38c5d60b95241fd15bf4c4a50a3
|
[
"0BSD"
] | 21
|
2019-05-07T13:30:45.000Z
|
2022-03-22T21:03:30.000Z
|
"""Test version bumping."""
from git_changelog.build import bump
def test_bump():
    """Test default version bumping (defaults to a patch bump)."""
    cases = [
        ("0.0.1", "0.0.2"),
        ("0.1.0", "0.1.1"),
        ("0.1.1", "0.1.2"),
        ("1.0.0", "1.0.1"),
        ("1.0.1", "1.0.2"),
        ("1.1.0", "1.1.1"),
        ("1.1.1", "1.1.2"),
    ]
    # Each case is checked bare and with a leading "v" prefix.
    for prefix in ("", "v"):
        for version, expected in cases:
            assert bump(prefix + version) == prefix + expected
def test_bump_patch():
    """Test patch version bumping."""
    cases = [
        ("0.0.1", "0.0.2"),
        ("0.1.0", "0.1.1"),
        ("0.1.1", "0.1.2"),
        ("1.0.0", "1.0.1"),
        ("1.0.1", "1.0.2"),
        ("1.1.0", "1.1.1"),
        ("1.1.1", "1.1.2"),
    ]
    # Each case is checked bare and with a leading "v" prefix.
    for prefix in ("", "v"):
        for version, expected in cases:
            assert bump(prefix + version, "patch") == prefix + expected
def test_bump_minor():
    """Test minor version bumping."""
    cases = [
        ("0.0.1", "0.1.0"),
        ("0.1.0", "0.2.0"),
        ("0.1.1", "0.2.0"),
        ("1.0.0", "1.1.0"),
        ("1.0.1", "1.1.0"),
        ("1.1.0", "1.2.0"),
        ("1.1.1", "1.2.0"),
    ]
    # Each case is checked bare and with a leading "v" prefix.
    for prefix in ("", "v"):
        for version, expected in cases:
            assert bump(prefix + version, "minor") == prefix + expected
def test_bump_major():
    """Test major version bumping."""
    # Note: for pre-1.0 versions a "major" bump increments the minor part,
    # exactly as the original assertions specify.
    cases = [
        ("0.0.1", "0.1.0"),
        ("0.1.0", "0.2.0"),
        ("0.1.1", "0.2.0"),
        ("1.0.0", "2.0.0"),
        ("1.0.1", "2.0.0"),
        ("1.1.0", "2.0.0"),
        ("1.1.1", "2.0.0"),
    ]
    # Each case is checked bare and with a leading "v" prefix.
    for prefix in ("", "v"):
        for version, expected in cases:
            assert bump(prefix + version, "major") == prefix + expected
| 36.85
| 67
| 0.510176
| 554
| 2,948
| 2.700361
| 0.046931
| 0.374332
| 0.191176
| 0.088235
| 0.856952
| 0.850267
| 0.754011
| 0.516043
| 0
| 0
| 0
| 0.147084
| 0.197422
| 2,948
| 79
| 68
| 37.316456
| 0.485207
| 0.103121
| 0
| 0
| 0
| 0
| 0.31587
| 0
| 0
| 0
| 0
| 0
| 0.918033
| 1
| 0.065574
| true
| 0
| 0.016393
| 0
| 0.081967
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f06fc3f6b707f24a0e91d4fc52306aedf8cddace
| 27,678
|
py
|
Python
|
orca_models.py
|
jzhoulab/orca
|
8ebfda95dec118ee4069b12d25a2a93f9ea7ec8f
|
[
"Apache-2.0"
] | 22
|
2021-05-10T19:42:00.000Z
|
2022-03-14T08:34:07.000Z
|
orca_models.py
|
jzhoulab/orca
|
8ebfda95dec118ee4069b12d25a2a93f9ea7ec8f
|
[
"Apache-2.0"
] | null | null | null |
orca_models.py
|
jzhoulab/orca
|
8ebfda95dec118ee4069b12d25a2a93f9ea7ec8f
|
[
"Apache-2.0"
] | 4
|
2021-05-28T16:35:35.000Z
|
2022-03-19T12:23:08.000Z
|
"""
This module contains the class definition of all Orca models.
For usage of the models, see the orca_predict module.
"""
import pathlib
import numpy as np
import torch
from torch import nn
from orca_modules import Encoder, Encoder2, Encoder3, Decoder, Decoder_1m, Net
# Absolute path of the directory containing this module; used below to locate
# bundled model checkpoints (models/) and background resources (resources/).
ORCA_PATH = str(pathlib.Path(__file__).parent.absolute())
class H1esc(nn.Module):
    """
    Orca H1-ESC model (1-32Mb)

    Attributes
    ----------
    net0 : nn.DataParallel(Encoder)
        The first section of the multi-resolution encoder
        (bp resolution to 4kb resolution).
    net : nn.DataParallel(Encoder2)
        The second section of the multi-resolution encoder
        (4kb resolution to 128kb resolution).
    denets : dict(int: nn.DataParallel(Decoder))
        Decoders at each level, which are stored in a dictionary
        with an integer as key.
    normmats : dict(int: numpy.ndarray)
        The distance-based background matrices with expected log
        fold over background values at each level.
    epss : dict(int: float)
        The minimum background value at each level. Used for
        stabilizing the log fold computation by adding
        to both the numerator and the denominator.

    Note
    ----
    No ``forward`` is defined on this class; per the module docstring the
    submodules are driven by the ``orca_predict`` module.
    """

    def __init__(self,):
        super(H1esc, self).__init__()
        modelstr = "h1esc"
        # Second encoder stage plus one decoder per zoom level (1x..32x).
        self.net = nn.DataParallel(Encoder2())
        self.denet_1 = nn.DataParallel(Decoder())
        self.denet_2 = nn.DataParallel(Decoder())
        self.denet_4 = nn.DataParallel(Decoder())
        self.denet_8 = nn.DataParallel(Decoder())
        self.denet_16 = nn.DataParallel(Decoder())
        self.denet_32 = nn.DataParallel(Decoder())
        # Remember the thread count; restored at the end of __init__.
        num_threads = torch.get_num_threads()
        # Load all checkpoints onto CPU; strict=True requires exact key match.
        self.net.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".net.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_1.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d1.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_2.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d2.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_4.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d4.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_8.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d8.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_16.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d16.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        self.denet_32.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d32.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=True,
        )
        # Stage-1 encoder: its weights are a subset of the pretrained
        # ".net0" checkpoint, so filter that checkpoint down to this
        # module's own keys (stripping the "module." prefix mapping).
        self.net0 = nn.DataParallel(Encoder())
        net0_dict = self.net0.state_dict()
        pretrained_dict = torch.load(
            ORCA_PATH + "/models/orca_" + modelstr + ".net0.statedict",
            map_location=torch.device("cpu"),
        )
        pretrained_dict_filtered = {key: pretrained_dict["module." + key] for key in net0_dict}
        self.net0.load_state_dict(pretrained_dict_filtered)
        # 1Mb decoder extracted from the same pretrained ".net0" checkpoint.
        self.denet_1_pt = nn.DataParallel(Decoder_1m())
        denet_1_pt_dict = self.denet_1_pt.state_dict()
        pretrained_dict = torch.load(
            ORCA_PATH + "/models/orca_" + modelstr + ".net0.statedict",
            map_location=torch.device("cpu"),
        )
        pretrained_dict_filtered = {
            key: pretrained_dict["module." + key] for key in denet_1_pt_dict
        }
        self.denet_1_pt.load_state_dict(pretrained_dict_filtered)
        # All submodules are inference-only.
        self.denet_1_pt.eval()
        self.net0.eval()
        self.net.eval()
        self.denet_1.eval()
        self.denet_2.eval()
        self.denet_4.eval()
        self.denet_8.eval()
        self.denet_16.eval()
        self.denet_32.eval()
        # Distance-based expected (background) contact values at 4kb bins.
        expected_log = np.load(
            ORCA_PATH + "/resources/4DNFI9GMP2J8.rebinned.mcool.expected.res4000.npy"
        )
        # Expand the 1-D distance profile into a full 8000x8000 matrix
        # indexed by |i - j|, then average down to 250x250 at each zoom.
        normmat = np.exp(expected_log[np.abs(np.arange(8000)[None, :] - np.arange(8000)[:, None])])
        normmat_r1 = np.reshape(normmat[:250, :250], (250, 1, 250, 1)).mean(axis=1).mean(axis=2)
        normmat_r2 = np.reshape(normmat[:500, :500], (250, 2, 250, 2)).mean(axis=1).mean(axis=2)
        normmat_r4 = np.reshape(normmat[:1000, :1000], (250, 4, 250, 4)).mean(axis=1).mean(axis=2)
        normmat_r8 = np.reshape(normmat[:2000, :2000], (250, 8, 250, 8)).mean(axis=1).mean(axis=2)
        normmat_r16 = (
            np.reshape(normmat[:4000, :4000], (250, 16, 250, 16)).mean(axis=1).mean(axis=2)
        )
        normmat_r32 = (
            np.reshape(normmat[:8000, :8000], (250, 32, 250, 32)).mean(axis=1).mean(axis=2)
        )
        # Per-level minimum background values (the "eps" stabilizers).
        eps1 = np.min(normmat_r1)
        eps2 = np.min(normmat_r2)
        eps4 = np.min(normmat_r4)
        eps8 = np.min(normmat_r8)
        eps16 = np.min(normmat_r16)
        eps32 = np.min(normmat_r32)
        self.normmats = {
            1: normmat_r1,
            2: normmat_r2,
            4: normmat_r4,
            8: normmat_r8,
            16: normmat_r16,
            32: normmat_r32,
        }
        self.epss = {1: eps1, 2: eps2, 4: eps4, 8: eps8, 16: eps16, 32: eps32}
        self.denets = {
            1: self.denet_1,
            2: self.denet_2,
            4: self.denet_4,
            8: self.denet_8,
            16: self.denet_16,
            32: self.denet_32,
        }
        torch.set_num_threads(num_threads)
class Hff(nn.Module):
    """
    Orca HFF model (1-32Mb)

    Attributes
    ----------
    net0 : nn.DataParallel(Encoder)
        The first section of the multi-resolution encoder
        (bp resolution to 4kb resolution).
    net : nn.DataParallel(Encoder2)
        The second section of the multi-resolution encoder
        (4kb resolution to 128kb resolution).
    denets : dict(int: nn.DataParallel(Decoder))
        Decoders at each level, which are stored in a dictionary
        with an integer as key.
    normmats : dict(int: numpy.ndarray)
        The distance-based background matrices with expected log
        fold over background values at each level.
    epss : dict(int: float)
        The minimum background value at each level. Used for
        stabilizing the log fold computation by adding
        to both the numerator and the denominator.
    """

    def __init__(self):
        super(Hff, self).__init__()
        modelstr = "hff"
        # Second encoder stage plus one decoder per zoom level (1x..32x).
        self.net = nn.DataParallel(Encoder2())
        self.denet_1 = nn.DataParallel(Decoder())
        self.denet_2 = nn.DataParallel(Decoder())
        self.denet_4 = nn.DataParallel(Decoder())
        self.denet_8 = nn.DataParallel(Decoder())
        self.denet_16 = nn.DataParallel(Decoder())
        self.denet_32 = nn.DataParallel(Decoder())
        # Remember the thread count; restored at the end of __init__.
        num_threads = torch.get_num_threads()
        # NOTE(review): unlike H1esc these loads use strict=False — presumably
        # to tolerate key mismatches in the hff checkpoints; confirm.
        self.net.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".net.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_1.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d1.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_2.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d2.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_4.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d4.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_8.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d8.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_16.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d16.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        self.denet_32.load_state_dict(
            torch.load(
                ORCA_PATH + "/models/orca_" + modelstr + ".d32.statedict",
                map_location=torch.device("cpu"),
            ),
            strict=False,
        )
        # Stage-1 encoder: its weights are a subset of the pretrained
        # ".net0" checkpoint, so filter that checkpoint down to this
        # module's own keys (stripping the "module." prefix mapping).
        self.net0 = nn.DataParallel(Encoder())
        net0_dict = self.net0.state_dict()
        pretrained_dict = torch.load(
            ORCA_PATH + "/models/orca_" + modelstr + ".net0.statedict",
            map_location=torch.device("cpu"),
        )
        pretrained_dict_filtered = {key: pretrained_dict["module." + key] for key in net0_dict}
        self.net0.load_state_dict(pretrained_dict_filtered)
        # 1Mb decoder extracted from the same pretrained ".net0" checkpoint.
        self.denet_1_pt = nn.DataParallel(Decoder_1m())
        denet_1_pt_dict = self.denet_1_pt.state_dict()
        pretrained_dict = torch.load(
            ORCA_PATH + "/models/orca_" + modelstr + ".net0.statedict",
            map_location=torch.device("cpu"),
        )
        pretrained_dict_filtered = {
            key: pretrained_dict["module." + key] for key in denet_1_pt_dict
        }
        self.denet_1_pt.load_state_dict(pretrained_dict_filtered)
        # All submodules are inference-only.
        self.denet_1_pt.eval()
        self.net0.eval()
        self.net.eval()
        self.denet_1.eval()
        self.denet_2.eval()
        self.denet_4.eval()
        self.denet_8.eval()
        self.denet_16.eval()
        self.denet_32.eval()
        # Distance-based expected (background) contact values at 4kb bins.
        expected_log = np.load(
            ORCA_PATH + "/resources/4DNFI643OYP9.rebinned.mcool.expected.res4000.npy"
        )
        # Expand the 1-D distance profile into a full 8000x8000 matrix
        # indexed by |i - j|, then average down to 250x250 at each zoom.
        normmat = np.exp(expected_log[np.abs(np.arange(8000)[:, None] - np.arange(8000)[None, :])])
        normmat_r1 = np.reshape(normmat[:250, :250], (250, 1, 250, 1)).mean(axis=1).mean(axis=2)
        normmat_r2 = np.reshape(normmat[:500, :500], (250, 2, 250, 2)).mean(axis=1).mean(axis=2)
        normmat_r4 = np.reshape(normmat[:1000, :1000], (250, 4, 250, 4)).mean(axis=1).mean(axis=2)
        normmat_r8 = np.reshape(normmat[:2000, :2000], (250, 8, 250, 8)).mean(axis=1).mean(axis=2)
        normmat_r16 = (
            np.reshape(normmat[:4000, :4000], (250, 16, 250, 16)).mean(axis=1).mean(axis=2)
        )
        normmat_r32 = (
            np.reshape(normmat[:8000, :8000], (250, 32, 250, 32)).mean(axis=1).mean(axis=2)
        )
        # Per-level minimum background values (the "eps" stabilizers).
        eps1 = np.min(normmat_r1)
        eps2 = np.min(normmat_r2)
        eps4 = np.min(normmat_r4)
        eps8 = np.min(normmat_r8)
        eps16 = np.min(normmat_r16)
        eps32 = np.min(normmat_r32)
        self.normmats = {
            1: normmat_r1,
            2: normmat_r2,
            4: normmat_r4,
            8: normmat_r8,
            16: normmat_r16,
            32: normmat_r32,
        }
        self.epss = {1: eps1, 2: eps2, 4: eps4, 8: eps8, 16: eps16, 32: eps32}
        self.denets = {
            1: self.denet_1,
            2: self.denet_2,
            4: self.denet_4,
            8: self.denet_8,
            16: self.denet_16,
            32: self.denet_32,
        }
        torch.set_num_threads(num_threads)
class HCTnoc(nn.Module):
    """
    Orca cohesin-depleted HCT116 model (1-32Mb)

    Attributes
    ----------
    net0 : nn.DataParallel(Encoder)
        The first section of the multi-resolution encoder
        (bp resolution to 4kb resolution).
    net : nn.DataParallel(Encoder2)
        The second section of the multi-resolution encoder
        (4kb resolution to 128kb resolution).
    denets : dict(int: nn.DataParallel(Decoder))
        Decoders at each level, which are stored in a dictionary
        with an integer as key.
    normmats : dict(int: numpy.ndarray)
        The distance-based background matrices with expected log
        fold over background values at each level.
    epss : dict(int: float)
        The minimum background value at each level. Used for
        stabilizing the log fold computation by adding
        to both the numerator and the denominator.

    Note
    ----
    NOTE(review): unlike H1esc/Hff, the checkpoints here are loaded without
    ``map_location`` and ``net0`` is moved to GPU via ``.cuda()`` — this class
    appears to require a CUDA-capable environment; confirm. It also does not
    save/restore ``torch`` thread counts and has no ``denet_1_pt``.
    """

    def __init__(self):
        super(HCTnoc, self).__init__()
        modelstr = "hctnoc"
        # Second encoder stage plus one decoder per zoom level (1x..32x).
        self.net = nn.DataParallel(Encoder2())
        self.denet_1 = nn.DataParallel(Decoder())
        self.denet_2 = nn.DataParallel(Decoder())
        self.denet_4 = nn.DataParallel(Decoder())
        self.denet_8 = nn.DataParallel(Decoder())
        self.denet_16 = nn.DataParallel(Decoder())
        self.denet_32 = nn.DataParallel(Decoder())
        self.net.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".net.statedict"), strict=True
        )
        self.denet_1.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d1.statedict"), strict=True
        )
        self.denet_2.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d2.statedict"), strict=True
        )
        self.denet_4.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d4.statedict"), strict=True
        )
        self.denet_8.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d8.statedict"), strict=True
        )
        self.denet_16.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d16.statedict"), strict=True
        )
        self.denet_32.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".d32.statedict"), strict=True
        )
        # Stage-1 encoder loaded directly from its own checkpoint (no key
        # filtering here, unlike H1esc/Hff).
        self.net0 = nn.DataParallel(Encoder())
        self.net0.load_state_dict(
            torch.load(ORCA_PATH + "/models/orca_" + modelstr + ".net0.statedict"), strict=True
        )
        self.net0.cuda()
        # All submodules are inference-only.
        self.net0.eval()
        self.net.eval()
        self.denet_1.eval()
        self.denet_2.eval()
        self.denet_4.eval()
        self.denet_8.eval()
        self.denet_16.eval()
        self.denet_32.eval()
        # Distance-based expected (background) contact values at 4kb bins.
        smooth_diag = np.load(
            ORCA_PATH + "/resources/4DNFILP99QJS.HCT_auxin6h.rebinned.mcool.expected.res4000.npy"
        )
        # Expand the 1-D distance profile into a full 8000x8000 matrix
        # indexed by |i - j|, then average down to 250x250 at each zoom.
        normmat = np.exp(smooth_diag[np.abs(np.arange(8000)[None, :] - np.arange(8000)[:, None])])
        normmat_r1 = np.reshape(normmat[:250, :250], (250, 1, 250, 1)).mean(axis=1).mean(axis=2)
        normmat_r2 = np.reshape(normmat[:500, :500], (250, 2, 250, 2)).mean(axis=1).mean(axis=2)
        normmat_r4 = np.reshape(normmat[:1000, :1000], (250, 4, 250, 4)).mean(axis=1).mean(axis=2)
        normmat_r8 = np.reshape(normmat[:2000, :2000], (250, 8, 250, 8)).mean(axis=1).mean(axis=2)
        normmat_r16 = (
            np.reshape(normmat[:4000, :4000], (250, 16, 250, 16)).mean(axis=1).mean(axis=2)
        )
        normmat_r32 = (
            np.reshape(normmat[:8000, :8000], (250, 32, 250, 32)).mean(axis=1).mean(axis=2)
        )
        # Per-level minimum background values (the "eps" stabilizers).
        eps1 = np.min(normmat_r1)
        eps2 = np.min(normmat_r2)
        eps4 = np.min(normmat_r4)
        eps8 = np.min(normmat_r8)
        eps16 = np.min(normmat_r16)
        eps32 = np.min(normmat_r32)
        self.normmats = {
            1: normmat_r1,
            2: normmat_r2,
            4: normmat_r4,
            8: normmat_r8,
            16: normmat_r16,
            32: normmat_r32,
        }
        self.epss = {1: eps1, 2: eps2, 4: eps4, 8: eps8, 16: eps16, 32: eps32}
        self.denets = {
            1: self.denet_1,
            2: self.denet_2,
            4: self.denet_4,
            8: self.denet_8,
            16: self.denet_16,
            32: self.denet_32,
        }
class H1esc_1M(nn.Module):
    """
    Orca H1-ESC model (1Mb).

    Attributes
    ----------
    net : nn.DataParallel(Net)
        Integrated Encoder and Decoder for the 1Mb model.
    normmats : dict(int: numpy.ndarray)
        The distance-based background matrix with expected values.
    epss : dict(int: float)
        The minimum background value, used for stabilizing the log fold
        computation by adding to both the numerator and the denominator.
    """

    def __init__(self,):
        super(H1esc_1M, self).__init__()
        self.net = nn.DataParallel(Net(num_1d=32))
        saved_threads = torch.get_num_threads()  # restored before returning
        # Pull only the keys this module owns out of the pretrained
        # checkpoint (stripping the "module." prefix mapping).
        checkpoint = torch.load(
            ORCA_PATH + "/models/orca_h1esc.net0.statedict", map_location=torch.device("cpu")
        )
        own_keys = self.net.state_dict()
        self.net.load_state_dict({key: checkpoint["module." + key] for key in own_keys})
        self.net.eval()  # inference-only
        # Distance-based expected contact values at 1kb bins, expanded into
        # a 1000x1000 matrix indexed by |i - j| and averaged to 250x250.
        diag_log = np.load(
            ORCA_PATH + "/resources/4DNFI9GMP2J8.rebinned.mcool.expected.res1000.npy"
        )[:1000]
        distance = np.abs(np.arange(1000)[None, :] - np.arange(1000)[:, None])
        background = np.exp(diag_log[distance])
        background_r = np.reshape(background, (250, 4, 250, 4)).mean(axis=1).mean(axis=2)
        self.normmats = {1: background_r}
        self.epss = {1: np.min(background_r)}
        torch.set_num_threads(saved_threads)

    def forward(self, x):
        """Run the 1Mb network and return only the prediction output."""
        prediction, _ = self.net.forward(x)
        return prediction
class Hff_1M(nn.Module):
    """
    Orca HFF model (1Mb).

    Attributes
    ----------
    net : nn.DataParallel(Net)
        Integrated Encoder and Decoder for the 1Mb model.
    normmats : dict(int: numpy.ndarray)
        The distance-based background matrix with expected values.
    epss : dict(int: float)
        The minimum background value, used for stabilizing the log fold
        computation by adding to both the numerator and the denominator.
    """

    def __init__(self,):
        super(Hff_1M, self).__init__()
        self.net = nn.DataParallel(Net(num_1d=22))
        saved_threads = torch.get_num_threads()  # restored before returning
        # Pull only the keys this module owns out of the pretrained
        # checkpoint (stripping the "module." prefix mapping).
        checkpoint = torch.load(
            ORCA_PATH + "/models/orca_hff.net0.statedict", map_location=torch.device("cpu"),
        )
        own_keys = self.net.state_dict()
        self.net.load_state_dict({key: checkpoint["module." + key] for key in own_keys})
        self.net.eval()  # inference-only
        # Distance-based expected contact values at 1kb bins, expanded into
        # a 1000x1000 matrix indexed by |i - j| and averaged to 250x250.
        diag = np.exp(
            np.load(ORCA_PATH + "/resources/4DNFI643OYP9.rebinned.mcool.expected.res1000.npy")[
                :1000
            ]
        )
        distance = np.abs(np.arange(1000)[:, None] - np.arange(1000)[None, :])
        background = diag[distance]
        background_r = np.reshape(background, (250, 4, 250, 4)).mean(axis=1).mean(axis=2)
        self.normmats = {1: background_r}
        self.epss = {1: np.min(background_r)}
        torch.set_num_threads(saved_threads)

    def forward(self, x):
        """Run the 1Mb network and return only the prediction output."""
        prediction, _ = self.net.forward(x)
        return prediction
class H1esc_256M(nn.Module):
"""
Orca H1-ESC model (32-256Mb)
Attributes
----------
net0 : nn.DataParallel(Encoder)
The first section of the multi-resolution encoder
(bp resolution to 4kb resolution).
net1 : nn.DataParallel(Encoder2)
The second section of the multi-resolution encoder
(4kb resolution to 128kb resolution).
net : nn.DataParallel(Encoder3)
The third section of the multi-resolution encoder
(128kb resolution to 1024kb resolution).
denets : dict(int: nn.DataParallel(Decoder))
Decoders at each level, which are stored in a dictionary
with an integer as key.
normmats : dict(int: numpy.ndarray)
The distance-based background matrices with expected log
fold over background values at each level.
epss : dict(int: float)
The minimum background value at each level. Used for
stablizing the log fold computation by adding
to both the nominator and the denominator.
"""
def __init__(self,):
super(H1esc_256M, self).__init__()
modelstr = "h1esc_256m"
self.net = nn.DataParallel(Encoder3())
self.denet_32 = nn.DataParallel(Decoder())
self.denet_64 = nn.DataParallel(Decoder())
self.denet_128 = nn.DataParallel(Decoder())
self.denet_256 = nn.DataParallel(Decoder())
num_threads = torch.get_num_threads()
self.net.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".net.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_32.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d32.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_64.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d64.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_128.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d128.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_256.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d256.statedict",
map_location=torch.device("cpu"),
)
)
self.net0 = nn.DataParallel(Encoder())
net0_dict = self.net0.state_dict()
pretrained_dict = torch.load(
ORCA_PATH + "/models/orca_h1esc.net0.statedict", map_location=torch.device("cpu"),
)
pretrained_dict_filtered = {key: pretrained_dict["module." + key] for key in net0_dict}
self.net0.load_state_dict(pretrained_dict_filtered)
self.net1 = nn.DataParallel(Encoder2())
net1_dict = self.net1.state_dict()
pretrained_dict = torch.load(
ORCA_PATH + "/models/orca_h1esc.net.statedict", map_location=torch.device("cpu"),
)
pretrained_dict_filtered = {key: pretrained_dict[key] for key in net1_dict}
self.net1.load_state_dict(pretrained_dict_filtered)
self.net0.eval()
self.net1.eval()
self.net.eval()
self.denet_32.eval()
self.denet_64.eval()
self.denet_128.eval()
self.denet_256.eval()
self.background_cis = np.load(
ORCA_PATH + "/resources/4DNFI9GMP2J8.rebinned.mcool.expected.res32000.mono.npy"
)
self.background_trans = np.load(
ORCA_PATH + "/resources/4DNFI9GMP2J8.rebinned.mcool.expected.res32000.trans.npy"
)
self.background_cis = np.hstack([np.exp(self.background_cis), np.repeat(np.nan, 2000)])
self.background_trans = np.exp(self.background_trans)
self.denets = {
32: self.denet_32,
64: self.denet_64,
128: self.denet_128,
256: self.denet_256,
}
torch.set_num_threads(num_threads)
class Hff_256M(nn.Module):
"""
Orca HFF model (32-256Mb)
Attributes
----------
net0 : nn.DataParallel(Encoder)
The first section of the multi-resolution encoder
(bp resolution to 4kb resolution).
net1 : nn.DataParallel(Encoder2)
The second section of the multi-resolution encoder
(4kb resolution to 128kb resolution).
net : nn.DataParallel(Encoder3)
The third section of the multi-resolution encoder
(128kb resolution to 1024kb resolution).
denets : dict(int: nn.DataParallel(Decoder))
Decoders at each level, which are stored in a dictionary
with an integer as key.
normmats : dict(int: numpy.ndarray)
The distance-based background matrices with expected log
fold over background values at each level.
epss : dict(int: float)
The minimum background value at each level. Used for
stablizing the log fold computation by adding
to both the nominator and the denominator.
"""
def __init__(self):
super(Hff_256M, self).__init__()
modelstr = "hff_256m"
self.net = nn.DataParallel(Encoder3())
self.denet_32 = nn.DataParallel(Decoder())
self.denet_64 = nn.DataParallel(Decoder())
self.denet_128 = nn.DataParallel(Decoder())
self.denet_256 = nn.DataParallel(Decoder())
num_threads = torch.get_num_threads()
self.net.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".net.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_32.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d32.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_64.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d64.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_128.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d128.statedict",
map_location=torch.device("cpu"),
)
)
self.denet_256.load_state_dict(
torch.load(
ORCA_PATH + "/models/orca_" + modelstr + ".d256.statedict",
map_location=torch.device("cpu"),
)
)
self.net0 = nn.DataParallel(Encoder())
net0_dict = self.net0.state_dict()
pretrained_dict = torch.load(
ORCA_PATH + "/models/orca_hff.net0.statedict", map_location=torch.device("cpu"),
)
pretrained_dict_filtered = {key: pretrained_dict["module." + key] for key in net0_dict}
self.net0.load_state_dict(pretrained_dict_filtered)
self.net1 = nn.DataParallel(Encoder2())
net1_dict = self.net1.state_dict()
pretrained_dict = torch.load(
ORCA_PATH + "/models/orca_hff.net.statedict", map_location=torch.device("cpu"),
)
pretrained_dict_filtered = {key: pretrained_dict[key] for key in net1_dict}
self.net1.load_state_dict(pretrained_dict_filtered)
self.net0.eval()
self.net1.eval()
self.net.eval()
self.denet_32.eval()
self.denet_64.eval()
self.denet_128.eval()
self.denet_256.eval()
self.background_cis = np.load(
ORCA_PATH + "/resources/4DNFI643OYP9.rebinned.mcool.expected.res32000.mono.npy"
)
self.background_trans = np.load(
ORCA_PATH + "/resources/4DNFI643OYP9.rebinned.mcool.expected.res32000.trans.npy"
)
self.background_cis = np.hstack([np.exp(self.background_cis), np.repeat(np.nan, 2000)])
self.background_trans = np.exp(self.background_trans)
self.denets = {
32: self.denet_32,
64: self.denet_64,
128: self.denet_128,
256: self.denet_256,
}
torch.set_num_threads(num_threads)
| 36.370565
| 99
| 0.582737
| 3,354
| 27,678
| 4.622242
| 0.060227
| 0.06502
| 0.039476
| 0.046056
| 0.967103
| 0.96104
| 0.95775
| 0.952977
| 0.949494
| 0.943043
| 0
| 0.05709
| 0.297529
| 27,678
| 760
| 100
| 36.418421
| 0.740266
| 0.184587
| 0
| 0.749077
| 0
| 0
| 0.087245
| 0.034525
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016605
| false
| 0
| 0.009225
| 0
| 0.042435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2e56667aef801f5791c4102307a87a0908be84f
| 517
|
py
|
Python
|
test_harness.py
|
sergioisidoro/simple-ruuvitag
|
791645797c211a7f9506e2c496289d8890b0035f
|
[
"MIT"
] | 5
|
2020-04-14T16:11:23.000Z
|
2021-05-30T13:30:14.000Z
|
test_harness.py
|
sergioisidoro/simple-ruuvitag
|
791645797c211a7f9506e2c496289d8890b0035f
|
[
"MIT"
] | 3
|
2020-06-27T15:54:07.000Z
|
2021-03-28T17:47:59.000Z
|
test_harness.py
|
sergioisidoro/simple-ruuvitag
|
791645797c211a7f9506e2c496289d8890b0035f
|
[
"MIT"
] | 2
|
2021-02-20T05:12:43.000Z
|
2021-07-11T01:24:47.000Z
|
import time
from simple_ruuvitag.ruuvi import RuuviTagClient
ruuvi_client = RuuviTagClient()
ruuvi_client.start()
time.sleep(20)
last_datas = ruuvi_client.get_current_datas()
print(last_datas)
ruuvi_client.rescan()
time.sleep(20)
last_datas = ruuvi_client.get_current_datas()
print(last_datas)
# ruuvi_client.rescan()
# time.sleep(20)
# last_datas = ruuvi_client.get_current_datas()
# print(last_datas)
# ruuvi_client.rescan()
# time.sleep(20)
# last_datas = ruuvi_client.get_current_datas()
# print(last_datas)
| 20.68
| 48
| 0.794971
| 75
| 517
| 5.133333
| 0.226667
| 0.257143
| 0.254545
| 0.363636
| 0.755844
| 0.755844
| 0.755844
| 0.755844
| 0.755844
| 0.755844
| 0
| 0.016985
| 0.088975
| 517
| 25
| 49
| 20.68
| 0.800425
| 0.388781
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.181818
| 0
| 0.181818
| 0.181818
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
652d994995ddae28937c8c9258e3dbaea9e5adaf
| 133
|
py
|
Python
|
pyintesishome/__init__.py
|
tehbrd/pyIntesisHome
|
e8121fc61f583303b3b14dd7de5a26cc57f68e3d
|
[
"MIT"
] | null | null | null |
pyintesishome/__init__.py
|
tehbrd/pyIntesisHome
|
e8121fc61f583303b3b14dd7de5a26cc57f68e3d
|
[
"MIT"
] | null | null | null |
pyintesishome/__init__.py
|
tehbrd/pyIntesisHome
|
e8121fc61f583303b3b14dd7de5a26cc57f68e3d
|
[
"MIT"
] | null | null | null |
from .pyintesishome import IntesisHome
from .pyintesishome import IHAuthenticationError
from .pyintesishome import IHConnectionError
| 33.25
| 48
| 0.887218
| 12
| 133
| 9.833333
| 0.5
| 0.432203
| 0.584746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 133
| 3
| 49
| 44.333333
| 0.975207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3311fcced297c478a6da6412629756f64726921b
| 5,229
|
py
|
Python
|
pythonwars/engine/levels.py
|
jamiees2/PythonWars
|
c815440228bffd0f2c45e4c05de33d3f098096f6
|
[
"MIT"
] | null | null | null |
pythonwars/engine/levels.py
|
jamiees2/PythonWars
|
c815440228bffd0f2c45e4c05de33d3f098096f6
|
[
"MIT"
] | null | null | null |
pythonwars/engine/levels.py
|
jamiees2/PythonWars
|
c815440228bffd0f2c45e4c05de33d3f098096f6
|
[
"MIT"
] | null | null | null |
from .world import World
from .coin import Coin
from .robot import Robot
from .portal import Portal
from .crate import Crate
from .plate import Plate
MODE_REGULAR = 1
MODE_INVISIBLE = 2
def level1():
"""Can you collect all of the coins?"""
maze = [[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2],
[2, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 2],
[2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]]
world = World(maze)
world.create_object(Coin("Coin1"), 1, 1)
world.create_object(Coin("Coin2"), 8, 1)
world.create_object(Plate('plate1'), 3, 2)
world.create_object(Crate('crate2'), 1, 3)
robot = Robot("Robot1")
world.create_object(robot, 1, 2, static=False)
world.tick()
return {"robot": robot, "world": world, "coins": 2}
def level2():
"""Oh no! Someone turned off the lights. Can you still collect all the coins? It's harder than it looks!"""
maze = [[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2],
[2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2],
[2, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 2],
[2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]]
world = World(maze)
world.create_object(Coin("Coin1"), 1, 1)
world.create_object(Coin("Coin2"), 4, 1)
robot = Robot("Robot1")
robot = Robot("Robot1")
robot._hidden = True
world.create_object(robot, 1, 2, static=False)
world.tick()
return {"robot": robot, "world": world, "coins": 2, "mode": MODE_INVISIBLE}
def level3():
"""Now you're thinking with portals. Can you collect all the coins?"""
maze = [[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
[2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 2],
[2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2],
[2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2],
[2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2],
[2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]]
world = World(maze)
world.create_object(Coin("Coin1"), 9, 7)
world.create_object(Coin("Coin2"), 9, 9)
port1 = Portal("port1")
port2 = Portal("port2")
port1.matching_portal = port2
port2.matching_portal = port1
world.create_object(port1, 1, 6)
world.create_object(port2, 9, 8)
world.create_object(Plate('plate1'), 2, 2)
world.create_object(Crate('crate1'), 9, 6)
world.create_object(Crate('crate2'), 1, 3)
robot = Robot("Robot1")
world.create_object(robot, 1, 2, static=False)
world.tick()
return {"robot": robot, "world": world, "coins": 2}
levels = {}
levels['1'] = level1
levels['2'] = level2
levels['3'] = level3
level_list = sorted(levels.keys())
| 43.214876
| 111
| 0.410212
| 1,174
| 5,229
| 1.807496
| 0.059625
| 0.28181
| 0.309614
| 0.337418
| 0.721018
| 0.671065
| 0.671065
| 0.671065
| 0.671065
| 0.671065
| 0
| 0.282596
| 0.342895
| 5,229
| 120
| 112
| 43.575
| 0.334983
| 0.038248
| 0
| 0.621053
| 0
| 0
| 0.029124
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031579
| false
| 0
| 0.063158
| 0
| 0.126316
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
684e1fd4e48f24e1fb978d8096e929bc15e3b343
| 224
|
py
|
Python
|
ixcom/data.py
|
imar-navigation/ixcom-python
|
4c6d6de65f7942fb65b8a21d62e1be9767b48c30
|
[
"MIT"
] | 4
|
2019-08-12T13:20:55.000Z
|
2021-12-20T09:48:36.000Z
|
ixcom/data.py
|
imar-navigation/ixcom-python
|
4c6d6de65f7942fb65b8a21d62e1be9767b48c30
|
[
"MIT"
] | 1
|
2020-04-23T19:04:49.000Z
|
2020-04-23T19:04:49.000Z
|
ixcom/data.py
|
imar-navigation/ixcom-python
|
4c6d6de65f7942fb65b8a21d62e1be9767b48c30
|
[
"MIT"
] | 4
|
2019-08-13T09:36:54.000Z
|
2022-01-21T12:58:34.000Z
|
from .protocol import *
from .commands import *
from .parameters import *
from .messages import *
try:
from ixcom_internal.parameters import *
from ixcom_internal.messages import *
except ImportError as e:
pass
| 20.363636
| 43
| 0.745536
| 28
| 224
| 5.892857
| 0.5
| 0.242424
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191964
| 224
| 10
| 44
| 22.4
| 0.911602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.111111
| 0.777778
| 0
| 0.777778
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
6860d03ce4022e4d3b562aa92003fd4fc667ec7a
| 173
|
py
|
Python
|
ninjalooter/tests/test_config.py
|
ewjax/ninjalooter
|
9faf94eed56de3c375cca4d1be24eab20dc7b226
|
[
"MIT"
] | 4
|
2020-08-30T12:57:03.000Z
|
2022-03-18T15:11:13.000Z
|
ninjalooter/tests/test_config.py
|
ewjax/ninjalooter
|
9faf94eed56de3c375cca4d1be24eab20dc7b226
|
[
"MIT"
] | 5
|
2022-01-07T03:17:32.000Z
|
2022-03-27T21:20:12.000Z
|
ninjalooter/tests/test_config.py
|
ewjax/ninjalooter
|
9faf94eed56de3c375cca4d1be24eab20dc7b226
|
[
"MIT"
] | 1
|
2021-12-28T02:18:04.000Z
|
2021-12-28T02:18:04.000Z
|
from ninjalooter import config # noqa
from ninjalooter.tests import base
# TODO: placeholder class, will config need testing?
class TestConfig(base.NLTestBase):
pass
| 21.625
| 52
| 0.780347
| 22
| 173
| 6.136364
| 0.727273
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16185
| 173
| 7
| 53
| 24.714286
| 0.931034
| 0.317919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
6882f9c68eee2efc4044ca834132d838b67d2e19
| 144
|
py
|
Python
|
sosia/processing/caching/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 14
|
2019-03-12T22:07:47.000Z
|
2022-03-08T14:05:05.000Z
|
sosia/processing/caching/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 31
|
2018-10-15T16:02:44.000Z
|
2021-04-09T08:13:44.000Z
|
sosia/processing/caching/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 2
|
2020-01-09T06:47:09.000Z
|
2020-12-05T13:21:03.000Z
|
from sosia.processing.caching.inserting import *
from sosia.processing.caching.retrieving import *
from sosia.processing.caching.utils import *
| 36
| 49
| 0.833333
| 18
| 144
| 6.666667
| 0.444444
| 0.225
| 0.475
| 0.65
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 144
| 3
| 50
| 48
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
688ee3b8c8472ed6db724f15989571f1689921f6
| 76
|
py
|
Python
|
xappt_qt/plugins/__init__.py
|
cmontesano/xappt_qt
|
74f8c62e0104a67b4b4eb65382df851221bf0bab
|
[
"MIT"
] | null | null | null |
xappt_qt/plugins/__init__.py
|
cmontesano/xappt_qt
|
74f8c62e0104a67b4b4eb65382df851221bf0bab
|
[
"MIT"
] | 12
|
2020-10-11T22:42:12.000Z
|
2021-10-04T19:38:51.000Z
|
xappt_qt/plugins/__init__.py
|
cmontesano/xappt_qt
|
74f8c62e0104a67b4b4eb65382df851221bf0bab
|
[
"MIT"
] | 1
|
2021-09-29T23:53:34.000Z
|
2021-09-29T23:53:34.000Z
|
import xappt_qt.plugins.interfaces.qt
from xappt_qt.plugins.tools import *
| 19
| 37
| 0.828947
| 12
| 76
| 5.083333
| 0.583333
| 0.229508
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092105
| 76
| 3
| 38
| 25.333333
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
689651dc76f5345adda2a00efcce0794ac630443
| 590
|
py
|
Python
|
run.py
|
IntiTutorial/TERMUXMOD
|
2142fe1a89c3bb452627726775834e4aa81b8f7a
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
IntiTutorial/TERMUXMOD
|
2142fe1a89c3bb452627726775834e4aa81b8f7a
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
IntiTutorial/TERMUXMOD
|
2142fe1a89c3bb452627726775834e4aa81b8f7a
|
[
"Apache-2.0"
] | 2
|
2020-02-07T07:26:03.000Z
|
2020-02-17T12:47:26.000Z
|
#Compiled By ./HEZALA
#Github : https://github.com/IntiTutorial
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s)\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00e\x00\x00j\x02\x00d\x02\x00\x83\x01\x00\x01d\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0b\x00\x00\x00bash run.sh(\x03\x00\x00\x00t\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x06\x00\x00\x00system(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x07\x00\x00\x00<febry>t\x08\x00\x00\x00<module>\x08\x00\x00\x00s\x02\x00\x00\x00\x18\x01'''))
| 147.5
| 511
| 0.752542
| 123
| 590
| 3.609756
| 0.341463
| 0.472973
| 0.425676
| 0.405405
| 0.274775
| 0.274775
| 0.128378
| 0.128378
| 0.074324
| 0.074324
| 0
| 0.342513
| 0.015254
| 590
| 4
| 511
| 147.5
| 0.421687
| 0.101695
| 0
| 0
| 0
| 0.5
| 0.914934
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
d7a7afcf4ae1b585771e56af3fd72cee9811b95a
| 8,041
|
py
|
Python
|
test/test_pb_remove.py
|
pandorabots/pb-cli
|
06bb675399f196df2d460a7399e0209324b957a5
|
[
"BSD-2-Clause"
] | 34
|
2015-06-07T03:36:52.000Z
|
2021-04-14T12:49:22.000Z
|
test/test_pb_remove.py
|
pandorabots/pb-cli
|
06bb675399f196df2d460a7399e0209324b957a5
|
[
"BSD-2-Clause"
] | 17
|
2015-02-27T00:50:31.000Z
|
2021-08-11T09:10:04.000Z
|
test/test_pb_remove.py
|
pandorabots/pb-cli
|
06bb675399f196df2d460a7399e0209324b957a5
|
[
"BSD-2-Clause"
] | 15
|
2015-05-29T16:31:49.000Z
|
2020-12-08T05:42:08.000Z
|
import unittest
import subprocess
import json
import os
import util
import time
from TestConfig import *
config = {}
test_env = os.getenv('test_env', 'aiaas')
env_setup = TestConfig()
config = env_setup.setEnvironment(test_env)
cli = os.path.abspath('./pb-cli/index.js')
class TestPBRemove(unittest.TestCase):
@classmethod
def setUpClass(self):
self.util = util.TestUtil()
self.util.announce_test_block('pb remove')
self.hostname = config["hostname"]
print self.hostname
def setUp(self):
self.util.create_and_compile()
def test_remove_aiml_file(self):
self.util.it('removes an aiml file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("test.aiml" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'test.aiml'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("test.aiml" in bot_files)
def test_remove_map_file(self):
self.util.it('removes a map file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("test.map" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'test.map'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("test.map" in bot_files)
def test_remove_set_file(self):
self.util.it('removes a set file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("test.set" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'test.set'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("test.set" in bot_files)
def test_remove_substitution_file(self):
self.util.it('removes a substitution file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("test.substitution" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'test.substitution'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("test.substitution" in bot_files)
def test_remove_pdefaults_file(self):
self.util.it('removes a pdefaults file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("testbot.pdefaults" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'testbot.pdefaults'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("testbot.pdefaults" in bot_files)
def test_remove_properties_file(self):
self.util.it('removes a properties file from the bot.')
bot_files = self.util.get_file_list()
self.assertTrue("testbot.properties" in bot_files)
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'testbot.properties'
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
result.communicate(input='yes')
time.sleep(1)
bot_files = self.util.get_file_list()
self.assertFalse("testbot.properties" in bot_files)
def test_invalid_botName(self):
self.util.it('returns 400 if the botname is invalid.')
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', 'ABCDEFG',
'--hostname', self.hostname,
'test.aiml',
'--yes'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.assertTrue('400' in result.stdout.read())
def test_bot_not_found(self):
self.util.it('returns 412 if the bot does not exist.')
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', '12345',
'--hostname', self.hostname,
'test.aiml',
'--yes'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.assertTrue('412' in result.stdout.read())
def test_file_not_found(self):
self.util.it('returns 412 if the file does not exist.')
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'fake.aiml',
'--yes'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.assertTrue('412' in result.stdout.read())
def test_invalid_userKey(self):
self.util.it('returns 401 if the user_key is invalid.')
result = subprocess.Popen([
cli, 'remove',
'--app_id', config['appId'],
'--user_key', '12345',
'--botname', config['botName'],
'--hostname', self.hostname,
'test.aiml',
'--yes'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.assertTrue('401' in result.stdout.read())
def test_invalid_appId(self):
self.util.it('returns 401 if the app_id is invalid.')
result = subprocess.Popen([
cli, 'remove',
'--app_id', '12345',
'--user_key', config['userKey'],
'--botname', config['botName'],
'--hostname', self.hostname,
'test.aiml',
'--yes'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.assertTrue('401' in result.stdout.read())
def tearDown(self):
self.util.delete_bot()
if __name__ == "__main__":
unittest.main()
| 31.410156
| 65
| 0.53165
| 824
| 8,041
| 5.042476
| 0.115291
| 0.051986
| 0.040433
| 0.046209
| 0.837304
| 0.808424
| 0.768712
| 0.733093
| 0.719134
| 0.708544
| 0
| 0.009538
| 0.335033
| 8,041
| 255
| 66
| 31.533333
| 0.767533
| 0
| 0
| 0.660287
| 0
| 0
| 0.183311
| 0
| 0
| 0
| 0
| 0
| 0.08134
| 0
| null | null | 0
| 0.033493
| null | null | 0.004785
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7bb5f77b9313ea88c4f648f61aad3b54432d571
| 569,189
|
py
|
Python
|
djangoSIGE/static/Downloads/0002_popula_estado_e_municipio.py
|
dayvidallan/mercadinho22h
|
0f67bdb545978543d72ab4b9b16e1d33ecec6293
|
[
"MIT"
] | null | null | null |
djangoSIGE/static/Downloads/0002_popula_estado_e_municipio.py
|
dayvidallan/mercadinho22h
|
0f67bdb545978543d72ab4b9b16e1d33ecec6293
|
[
"MIT"
] | null | null | null |
djangoSIGE/static/Downloads/0002_popula_estado_e_municipio.py
|
dayvidallan/mercadinho22h
|
0f67bdb545978543d72ab4b9b16e1d33ecec6293
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('base', '0001_initial'),
]
operations = [
migrations.RunSQL('''
INSERT INTO base_estado (id, nome, sigla) VALUES (5, 'ACRE', 'AC');
INSERT INTO base_estado (id, nome, sigla) VALUES (6, 'ALAGOAS', 'AL');
INSERT INTO base_estado (id, nome, sigla) VALUES (8, 'AMAPA', 'AP');
INSERT INTO base_estado (id, nome, sigla) VALUES (7, 'AMAZONAS', 'AM');
INSERT INTO base_estado (id, nome, sigla) VALUES (9, 'BAHIA', 'BA');
INSERT INTO base_estado (id, nome, sigla) VALUES (10, 'CEARA', 'CE');
INSERT INTO base_estado (id, nome, sigla) VALUES (12, 'ESPIRITO SANTO', 'ES');
INSERT INTO base_estado (id, nome, sigla) VALUES (13, 'GOIAS', 'GO');
INSERT INTO base_estado (id, nome, sigla) VALUES (14, 'MARANHAO', 'MA');
INSERT INTO base_estado (id, nome, sigla) VALUES (17, 'MATO GROSSO', 'MT');
INSERT INTO base_estado (id, nome, sigla) VALUES (16, 'MATO GROSSO DO SUL', 'MS');
INSERT INTO base_estado (id, nome, sigla) VALUES (15, 'MINAS GERAIS', 'MG');
INSERT INTO base_estado (id, nome, sigla) VALUES (18, 'PARA', 'PA');
INSERT INTO base_estado (id, nome, sigla) VALUES (19, 'PARAIBA', 'PB');
INSERT INTO base_estado (id, nome, sigla) VALUES (22, 'PARANA', 'PR');
INSERT INTO base_estado (id, nome, sigla) VALUES (3, 'Pernambuco', 'PE');
INSERT INTO base_estado (id, nome, sigla) VALUES (21, 'PIAUI', 'PI');
INSERT INTO base_estado (id, nome, sigla) VALUES (4, 'Rio de Janeiro', 'RJ');
INSERT INTO base_estado (id, nome, sigla) VALUES (1, 'Rio Grande do Norte', 'RN');
INSERT INTO base_estado (id, nome, sigla) VALUES (2, 'Rio Grande do Sul', 'RS');
INSERT INTO base_estado (id, nome, sigla) VALUES (25, 'RONDONIA', 'RO');
INSERT INTO base_estado (id, nome, sigla) VALUES (26, 'RORAIMA', 'RR');
INSERT INTO base_estado (id, nome, sigla) VALUES (28, 'SANTA CATARINA', 'SC');
INSERT INTO base_estado (id, nome, sigla) VALUES (30, 'SAO PAULO', 'SP');
INSERT INTO base_estado (id, nome, sigla) VALUES (29, 'SERGIPE', 'SE');
INSERT INTO base_estado (id, nome, sigla) VALUES (31, 'TOCANTINS', 'TO');
INSERT INTO base_estado (id, nome, sigla) VALUES (11, 'DISTRITO FEDERAL', 'DF');
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3349, '530010', 'BRASILIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3350, '530020', 'BRAZLANDIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5216, '530030', 'CANDANGOLANDIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5217, '530040', 'CEILANDIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3351, '530050', 'CRUZEIRO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5218, '530060', 'GAMA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5219, '530070', 'GUARA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3352, '530080', 'LAGO NORTE', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5220, '530090', 'LAGO SUL', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3353, '530100', 'NUCLEO BANDEIRANTE', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5221, '530110', 'PARANOA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5222, '530120', 'PLANALTINA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5223, '530130', 'RECANTO DAS EMAS', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3354, '530135', 'RIACHO FUNDO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5224, '530140', 'SAMAMBAIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5225, '530150', 'SANTA MARIA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3355, '530160', 'SAO SEBASTIAO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5226, '530170', 'SOBRADINHO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3156, '530180', 'TAGUATINGA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (564, '250590', 'EMAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5322, '521310', 'MINEIROS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2937, '291992', 'MADRE DE DEUS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5214, '530005', 'BRASILIA - ASA NORTE', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5215, '530015', 'BRASILIA - ASA SUL', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5229, '999999', 'SAS', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5230, '222222', 'DRAC/CGSOS', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2855, '130353', 'PRESIDENTE FIGUEIREDO', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5562, '888888', 'DAB', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5614, '539901', 'AGUAS CLARAS', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5615, '539913', 'PARK WAY', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5616, '539917', 'RIACHO FUNDO II', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5617, '539922', 'SOBRADINHO II', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5618, '539925', 'VARJAO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5619, '539928', 'ITAPOA', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5620, '539929', 'SCIA-ESTRUTURAL', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5621, '539930', 'JARDIM BOTANICO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5622, '539931', 'SUDOESTE/OCTOGONAL', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5623, '539933', 'SETOR DE INDUSTRIA E ABASTECIMENTO', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5624, '539934', 'VICENTE PIRES', 11);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (690, '130170', 'HUMAITA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (474, '420207', 'BALNEARIO GAIVOTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (366, '220400', 'FRANCINOPOLIS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1114, '270570', 'OLHO D''AGUA DAS FLORES', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3657, '430105', 'ARROIO DO SAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2096, '130020', 'ATALAIA DO NORTE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (360, '220330', 'DEMERVAL LOBAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1558, '330025', 'ARRAIAL DO CABO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5522, '355190', 'SEVERINIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3721, '431780', 'SANTO AUGUSTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5109, '430587', 'CORONEL BARROS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4770, '431861', 'SAO JOSE DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2667, '330555', 'SEROPEDICA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1662, '330515', 'SAO JOSE DO VALE DO RIO PRETO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (413, '316850', 'TEIXEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1540, '316500', 'SAO TIAGO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1537, '316292', 'SAO JOAQUIM DE BICAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5457, '500600', 'NOVA ALVORADA DO SUL', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2088, '314400', 'MUTUM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5453, '500380', 'FATIMA DO SUL', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (759, '292575', 'PRESIDENTE TANCREDO NEVES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3149, '510718', 'RIBEIRAO CASCALHEIRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4909, '500090', 'ANTONIO JOAO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3009, '410750', 'ENGENHEIRO BELTRAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (884, '412785', 'TRES BARRAS DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (563, '250570', 'DONA INES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2310, '412265', 'ROSARIO DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2727, '354980', 'SAO JOSE DO RIO PRETO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4286, '312695', 'FREI LAGONEGRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3978, '310610', 'BELMIRO BRAGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5580, '315340', 'PRESIDENTE OLEGARIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2091, '315550', 'RIO PARANAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5449, '500190', 'BATAGUASSU', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4320, '510642', 'PEIXOTO DE AZEVEDO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (371, '220460', 'HUGO NAPOLEAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1247, '311550', 'CAXAMBU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1299, '410800', 'FLORESTOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3665, '430235', 'BOM PRINCIPIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1525, '315530', 'RIO MANSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1410, '314670', 'PALMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4255, '312125', 'DELTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (405, '311750', 'CONCEICAO DO MATO DENTRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5014, '315740', 'SANTA CRUZ DO ESCALVADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (294, '310030', 'ABRE CAMPO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2273, '411720', 'NOVA OLIMPIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5541, '412220', 'RIO BRANCO DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3334, '430240', 'BOM RETIRO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3153, '521390', 'MOSSAMEDES', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3882, '315570', 'RIO PIRACICABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4550, '500740', 'RIO VERDE DE MATO GROSSO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (378, '220550', 'JOSE DE FREITAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4779, '432055', 'SERTAO SANTANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (418, '320290', 'ITARANA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2438, '312450', 'ESTIVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1771, '317065', 'VARGEM GRANDE DO RIO PARDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1543, '316770', 'SOBRALIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2537, '221000', 'SAO JOAO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2354, '241200', 'SAO GONCALO DO AMARANTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3950, '130280', 'MARAA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5006, '313835', 'LEME DO PRADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3962, '310285', 'ANGELANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (873, '412600', 'SAO SEBASTIAO DA AMOREIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3315, '430064', 'AMETISTA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1798, '320255', 'IBITIRAMA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (315, '211195', 'SUCUPIRA DO RIACHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5243, '311265', 'CAPITAO ANDRADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (779, '312280', 'DOM VICOSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (39, '220820', 'PIO IX', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4864, '292570', 'PRESIDENTE JANIO QUADROS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1239, '311070', 'CAMBUQUIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1350, '412217', 'RIO BRANCO DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (971, '291300', 'IBITIARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1720, '312890', 'GUIMARANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4572, '316020', 'SANTO ANTONIO DO ITAMBE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4821, '290327', 'BARROCAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (685, '130100', 'CARAUARI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2877, '210900', 'PORTO FRANCO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (669, '120010', 'BRASILEIA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4496, '311510', 'CASSIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1616, '315240', 'POTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1606, '315050', 'PIMENTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2347, '312990', 'IBITIURA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2978, '293000', 'SEBASTIAO LARANJEIRAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2073, '430140', 'ARVOREZINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (409, '313110', 'INIMUTABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5282, '520520', 'CATURAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2196, '320425', 'PONTO BELO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2376, '260780', 'ITAQUITINGA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4203, '210455', 'GOVERNADOR EDISON LOBAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2092, '316100', 'SAO DOMINGOS DO PRATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2648, '330250', 'MAGE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5318, '521270', 'MAMBAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2980, '293030', 'SERRA DOURADA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2933, '291720', 'ITUACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3027, '260090', 'AMARAJI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2190, '320170', 'CONCEICAO DO CASTELO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3015, '410785', 'FLOR DA SERRA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2421, '311780', 'CONCEICAO DOS OUROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3092, '261360', 'SAO JOSE DO EGITO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2967, '292860', 'SANTO AMARO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3240, '421005', 'MACIEIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4882, '293345', 'WANDERLEY', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3462, '351170', 'CHARQUEADA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2993, '293260', 'URANDI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (799, '320340', 'MIMOSO DO SUL', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3680, '431390', 'PANAMBI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4120, '210320', 'CHAPADINHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1792, '320180', 'DIVINO DE SAO LOURENCO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4021, '421410', 'PRESIDENTE NEREU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1259, '312060', 'CRUCILANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1195, '231370', 'UMARI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4312, '353380', 'OLEO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4986, '130420', 'TEFE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4520, '432290', 'VIADUTOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (976, '291400', 'IPIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4509, '311700', 'COMERCINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2870, '210005', 'ACAILANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4708, '430865', 'GARRUCHOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2977, '292990', 'SEABRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4836, '291130', 'GENTIO DO OURO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1856, '291810', 'JEREMOABO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4152, '210710', 'MORROS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5134, '431349', 'NOVO BARREIRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5051, '316290', 'SAO JOAO NEPOMUCENO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5320, '521300', 'MAURILANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1593, '314820', 'PATROCINIO DO MURIAE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5391, '431046', 'IPIRANGA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5475, '520480', 'CAMPO ALEGRE DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3342, '522050', 'SERRANOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1839, '290685', 'CAPELA DO ALTO ALEGRE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (762, '292870', 'SANTO ANTONIO DE JESUS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1076, '251320', 'SANTA CRUZ', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1843, '290980', 'CRUZ DAS ALMAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5552, '500370', 'DOURADOS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5021, '315850', 'SANTANA DE PIRAPAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (464, '412330', 'SANTA CRUZ DE MONTE CASTELO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4290, '352130', 'IPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (239, '231340', 'TIANGUA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4302, '352840', 'MAIRINQUE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4354, '520690', 'DAVINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2391, '270642', 'PARICONHA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4432, '220250', 'CARACOL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2699, '350500', 'BARAO DE ANTONINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1444, '354440', 'RUBIACEA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3910, '312230', 'DIVINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4698, '430673', 'DOUTOR MAURICIO CARDOSO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4246, '311990', 'CORREGO DO BOM JESUS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (696, '130260', 'MANAUS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (302, '211100', 'SAO JOAO BATISTA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4665, '231123', 'POTIRETAMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1383, '314260', 'MONSENHOR PAULO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (988, '291660', 'ITAPITANGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4220, '210923', 'PRESIDENTE MEDICI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5011, '313910', 'MADRE DE DEUS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1660, '330414', 'QUEIMADOS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1826, '290115', 'AMERICA DOURADA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5162, '510627', 'NOVO HORIZONTE DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (172, '353657', 'PAULISTANIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5549, '430900', 'GIRUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (253, '241350', 'SERRINHA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (372, '220500', 'ITAINOPOLIS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3189, '410715', 'DIAMANTE D''OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5565, '353490', 'PACAEMBU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1427, '354110', 'PRESIDENTE ALVES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4715, '431020', 'IJUI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3452, '350995', 'CANAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3743, '432000', 'SAPUCAIA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (350, '220255', 'CARIDADE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3320, '430110', 'ARROIO DOS RATOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4211, '210630', 'MAGALHAES DE ALMEIDA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (52, '220975', 'SAO GONCALO DO GURGUEIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5440, '430632', 'DERRUBADAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1591, '220840', 'PIRIPIRI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4102, '210120', 'BACABAL', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4187, '210030', 'ALDEIAS ALTAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4147, '210650', 'MATINHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4399, '430463', 'CAPAO DA CANOA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2222, '410940', 'GUARAPUAVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (425, '350040', 'AGUAS DA PRATA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4533, '432350', 'VISTA ALEGRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2037, '220965', 'SAO FRANCISCO DE ASSIS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1799, '320265', 'IRUPI', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3267, '421950', 'XANXERE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3226, '420510', 'DONA EMMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1150, '354425', 'ROSANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2481, '220005', 'ACAUA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (705, '130426', 'UARINI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4174, '211023', 'SANTANA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3916, '351930', 'IBATE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1200, '240030', 'AFONSO BEZERRA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1905, '250990', 'NATUBA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2761, '355620', 'VALINHOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5124, '431065', 'ITATI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4192, '210177', 'BELA VISTA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1889, '171884', 'SANDOLANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (330, '220105', 'ASSUNCAO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5142, '241255', 'SAO MIGUEL DO GOSTOSO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2028, '316300', 'SAO JOSE DA SAFIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3718, '431750', 'SANTO ANGELO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4019, '421370', 'POUSO REDONDO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1858, '291875', 'LAGOA REAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4022, '421420', 'QUILOMBO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3505, '351940', 'IBIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2984, '293110', 'TANQUINHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4215, '210720', 'NINA RODRIGUES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3818, '350280', 'ARACATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1377, '314170', 'MESQUITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2329, '270400', 'JUNQUEIRO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (830, '350860', 'CACHOEIRA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3920, '352610', 'JUQUIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (113, '230640', 'ITAPIPOCA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (200, '210250', 'CAJARI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3430, '170370', 'BREJINHO DE NAZARE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3921, '352780', 'LUPERCIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3784, '353800', 'PINDAMONHANGABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4134, '210500', 'HUMBERTO DE CAMPOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3956, '310180', 'ALPERCATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5067, '316530', 'SAO VICENTE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1847, '291250', 'IBIPITANGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1264, '312420', 'ESPERA FELIZ', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5249, '310710', 'BOA ESPERANCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1863, '292240', 'MUTUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2848, '230870', 'MORADA NOVA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5017, '315790', 'SANTA MARGARIDA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4933, '500640', 'PEDRO GOMES', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2461, '270840', 'SAO JOSE DA TAPERA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1964, '261300', 'SAO BENTO DO UNA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4243, '311950', 'CORONEL MURTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3976, '310590', 'BARROSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (893, '412870', 'VITORINO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (47, '220935', 'SANTANA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2784, '410280', 'BELA VISTA DO PARAISO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3510, '352070', 'INDIAPORA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1457, '354870', 'SAO BERNARDO DO CAMPO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2867, '150810', 'TUCURUI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3617, '421090', 'MODELO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1335, '411800', 'PARAISO DO NORTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2835, '270290', 'GIRAU DO PONCIANO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2180, '316910', 'TOLEDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1566, '171488', 'NOVA OLINDA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (682, '130060', 'BENJAMIN CONSTANT', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2665, '330520', 'SAO PEDRO DA ALDEIA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3840, '351260', 'CORONEL MACEDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1126, '270780', 'ROTEIRO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2398, '310750', 'BOM JARDIM DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1963, '261260', 'SANTA MARIA DA BOA VISTA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (978, '291430', 'IRAMAIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4950, '510180', 'BARRA DO GARCAS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5035, '316070', 'SANTOS DUMONT', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1368, '314000', 'MARIANA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4881, '293325', 'VEREDA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4386, '521935', 'SANTA ISABEL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1512, '314590', 'OURO BRANCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2079, '431113', 'JARI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1778, '317150', 'MATHIAS LOBATO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4132, '210467', 'GOVERNADOR NUNES FREIRE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2264, '411580', 'MEDIANEIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4301, '352740', 'LUCELIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2386, '270230', 'CORURIPE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2427, '312083', 'CUPARAQUE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2128, '210180', 'BENEDITO LEITE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5107, '420460', 'CRICIUMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3115, '352820', 'MACEDONIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1978, '313850', 'LIBERDADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2341, '310310', 'ANTONIO PRADO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5349, '430600', 'CRISSIUMAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2684, '350210', 'ANDRADINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2983, '293105', 'TANQUE NOVO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2612, '171050', 'ITACAJA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (900, '420100', 'ANITA GARIBALDI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5212, '522205', 'VICENTINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4717, '431043', 'IPE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2563, '150730', 'SAO FELIX DO XINGU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2382, '270040', 'ATALAIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3535, '352550', 'JOANOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2766, '355715', 'ZACARIAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3030, '260150', 'BELEM DE MARIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2155, '231025', 'PARAIPABA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2903, '260940', 'MORENO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2788, '410320', 'BOM SUCESSO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5018, '315800', 'SANTA MARIA DE ITABIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4563, '510360', 'DOM AQUINO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2742, '355255', 'SUZANAPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5181, '510788', 'SERRA NOVA DOURADA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4499, '311540', 'CATAS ALTAS DA NORUEGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3176, '291240', 'IBIPEBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4992, '170950', 'GURUPI', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1099, '251610', 'SOLEDADE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (739, '150350', 'IRITUIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4123, '210375', 'DAVINOPOLIS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (28, '520552', 'COLINAS DO SUL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (938, '420560', 'GALVAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1988, '140015', 'BONFIM', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (674, '120039', 'PORTO WALTER', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5532, '355535', 'UBARANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4720, '431085', 'JABOTICABA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3676, '431346', 'NOVO XINGU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5482, '521225', 'LAGOA SANTA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3472, '351385', 'DIRCE REIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3497, '351830', 'GUARAREMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1688, '420680', 'IBICARE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2532, '291980', 'MACAUBAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3177, '312800', 'GUANHAES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1258, '312050', 'CRISTINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1612, '315170', 'POCO FUNDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (80, '230130', 'ARARIPE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (298, '310400', 'ARAXA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4280, '312600', 'FLORESTAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4940, '500790', 'SIDROLANDIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1924, '251600', 'SOLANEA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2905, '261620', 'VERTENTES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (868, '412545', 'SAO JOSE DAS PALMEIRAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1468, '355340', 'TANABI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3200, '411575', 'MAUA DA SERRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1310, '411095', 'ITAIPULANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5570, '150840', 'XINGUARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2204, '330130', 'CASIMIRO DE ABREU', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4831, '290840', 'CONCEICAO DO COITE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3517, '352190', 'ITAJOBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (816, '350300', 'ARAMINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1156, '354530', 'SALTO DE PIRAPORA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3834, '350960', 'CAMPO LIMPO PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2465, '150140', 'BELEM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1809, '320405', 'PEDRO CANARIO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (320, '211260', 'URBANO SANTOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3977, '310600', 'BELA VISTA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3888, '315810', 'SANTA MARIA DO SALTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (912, '420250', 'BOM JARDIM DA SERRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4273, '312440', 'ESPIRITO SANTO DO DOURADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1085, '251410', 'SAO JOAO DO TIGRE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (447, '410773', 'FERNANDES PINHEIRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (201, '210310', 'CEDRAL', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1783, '320013', 'AGUIA BRANCA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3955, '310163', 'ALFREDO VASCONCELOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5260, '520090', 'AMORINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4014, '421290', 'PINHALZINHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4037, '421610', 'SAO DOMINGOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3006, '310090', 'AGUAS FORMOSAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (466, '412540', 'SAO JOSE DA BOA VISTA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5236, '313520', 'JANUARIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (559, '250510', 'CUITE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1304, '410950', 'GUARAQUECABA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (546, '250290', 'BREJO DOS SANTOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1228, '293220', 'UBAITABA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (79, '230120', 'ARACOIABA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1393, '314437', 'NATALANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4204, '210470', 'GRACA ARANHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4685, '240470', 'IPANGUACU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1760, '316880', 'TIRADENTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3625, '421280', 'BALNEARIO PICARRAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2578, '160021', 'CUTIAS', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1363, '412610', 'SAO TOME', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4343, '520357', 'BONOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3249, '421230', 'PAULO LOPES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3589, '420350', 'CAMPO ERE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4169, '210950', 'RIACHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1318, '411342', 'LIDIANOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (275, '260040', 'AGUA PRETA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3715, '431720', 'SANTA ROSA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1955, '260950', 'NAZARE DA MATA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3019, '410850', 'GENERAL CARNEIRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3239, '420970', 'LEBON REGIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (996, '291790', 'JANDAIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5512, '354720', 'SANTANA DA PONTE PENSA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1196, '231375', 'UMIRIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2607, '170740', 'ESPERANTINA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2119, '160070', 'TARTARUGALZINHO', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2657, '330415', 'QUISSAMA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1815, '280290', 'ITABAIANA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (448, '410855', 'GODOY MOREIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (149, '353240', 'NAZARE PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2130, '210580', 'LAGO DO JUNCO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3276, '430330', 'CAIBATE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3581, '420190', 'AURORA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1015, '292070', 'MARAU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (225, '220855', 'PORTO ALEGRE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (132, '230980', 'PACOTI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1795, '320230', 'GUACUI', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3722, '431800', 'SAO BORJA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4143, '210610', 'LORETO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1944, '260590', 'GAMELEIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (334, '220150', 'BATALHA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2381, '261580', 'TUPANATINGA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3336, '430260', 'BRAGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3972, '310470', 'ATALEIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1102, '251680', 'TRIUNFO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2487, '220630', 'MIGUEL LEAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3251, '421320', 'POMERODE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5325, '521385', 'MORRO AGUDO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5104, '171865', 'RIO DA CONCEICAO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4894, '310440', 'ARGIRITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3866, '314940', 'PEDRO TEIXEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2486, '220551', 'JUAZEIRO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3047, '260480', 'CORTES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4338, '520130', 'ANICUNS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4352, '520590', 'CORUMBAIBA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (791, '316930', 'TRES CORACOES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4649, '230460', 'GENERAL SAMPAIO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4492, '311430', 'CARMO DO PARANAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2859, '150270', 'CONCEICAO DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4925, '500470', 'IVINHEMA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2260, '411530', 'MARIOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5502, '315180', 'POCOS DE CALDAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1434, '354250', 'REGINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (152, '353290', 'NOVA EUROPA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3542, '352690', 'LIMEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1654, '330170', 'DUQUE DE CAXIAS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1127, '270800', 'SANTANA DO IPANEMA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3520, '352240', 'ITAPEVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2780, '410200', 'ASSIS CHATEAUBRIAND', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (662, '110140', 'MONTE NEGRO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (994, '291740', 'JACARACI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1945, '260620', 'GOIANA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (722, '150157', 'BOM JESUS DO TOCANTINS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3646, '421870', 'TUBARAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (290, '292335', 'OUROLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1733, '313140', 'IPIACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4885, '310080', 'AGUANIL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2991, '293210', 'UBAIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4809, '280540', 'POCO REDONDO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5484, '521487', 'NOVA IGUACU DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3341, '522028', 'SAO PATRICIO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (461, '412110', 'QUINTA DO SOL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (644, '110001', 'ALTA FLORESTA D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1485, '312720', 'FUNILANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3161, '314950', 'PEQUERI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1893, '250580', 'DUAS ESTRADAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5076, '316660', 'SERRA DA SAUDADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1777, '317120', 'VESPASIANO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3154, '521805', 'PORTEIRAO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3460, '351130', 'CEDRAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4279, '312570', 'FELIXLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2690, '350315', 'ARAPEI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1737, '313230', 'ITAIPE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (206, '210632', 'MARACACUME', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (903, '420160', 'ARROIO TRINTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1691, '420750', 'INDAIAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3116, '353280', 'NOVA ALIANCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3117, '353900', 'PIRANGI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1432, '354200', 'QUINTANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (175, '353700', 'PEDREGULHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1424, '354070', 'PORTO FERREIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2497, '230940', 'NOVO ORIENTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2753, '355475', 'TRABIJU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2843, '521600', 'PANAMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3545, '352760', 'LUIS ANTONIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1763, '316935', 'TRES MARIAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2820, '270020', 'ANADIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3576, '352880', 'MARACAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (196, '172110', 'TOCANTINIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5404, '431170', 'MACHADINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4445, '220515', 'JACOBINA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3541, '352680', 'LENCOIS PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5024, '315890', 'SANTANA DO MANHUACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5408, '431200', 'MARIANO MORO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4898, '310640', 'BELO VALE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5253, '110004', 'CACOAL', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2362, '250407', 'CARAUBAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3859, '420060', 'AGUAS MORNAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4011, '421240', 'PEDRAS GRANDES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2012, '430676', 'ELDORADO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (592, '280440', 'NEOPOLIS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (724, '150172', 'BRASIL NOVO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2557, '150630', 'SALVATERRA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4446, '220527', 'JATOBA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1598, '314920', 'PEDRINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3724, '431830', 'SAO GABRIEL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3428, '170270', 'AURORA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2584, '170030', 'AGUIARNOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4034, '421575', 'SAO BERNARDINO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1023, '292190', 'MUCUGE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3235, '420790', 'IRINEOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2549, '150555', 'PAU D''ARCO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4010, '421227', 'PASSOS MAIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (417, '320240', 'GUARAPARI', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3846, '351680', 'GASTAO VIDIGAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3580, '420165', 'ARVOREDO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4448, '220553', 'JUREMA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3449, '171780', 'PONTE ALTA DO BOM JESUS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3893, '316520', 'SAO THOME DAS LETRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5316, '521220', 'JUSSARA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1841, '290850', 'CONCEICAO DO JACUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4859, '292320', 'OLIVEIRA DOS BREJINHOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2086, '313800', 'LARANJAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4858, '292300', 'NOVA VICOSA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1634, '315580', 'RIO POMBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3367, '313920', 'MALACACHETA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1406, '314600', 'OURO FINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4617, '292400', 'PAULO AFONSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3368, '313990', 'MARIA DA FE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1374, '314090', 'MATIPO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4488, '311330', 'CARANGOLA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5005, '313830', 'LEANDRO FERREIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3953, '310150', 'ALEM PARAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2335, '291170', 'GUANAMBI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5309, '521120', 'ITAPURANGA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2573, '150808', 'TUCUMA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2267, '411620', 'MORRETES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2881, '211280', 'VIANA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3983, '310680', 'BIAS FORTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2629, '330020', 'ARARUAMA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4501, '311547', 'CATUTI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4032, '421567', 'SANTA TEREZINHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4808, '280500', 'PEDRA MOLE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5576, '314650', 'PAINS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (986, '291590', 'ITANAGRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2711, '350720', 'BORA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5310, '521140', 'ITAUCU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2363, '250610', 'FAGUNDES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4932, '500625', 'NOVO HORIZONTE DO SUL', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1900, '250860', 'LUCENA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1361, '412550', 'SAO JOSE DOS PINHAIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4283, '312640', 'FORTUNA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (449, '411000', 'IGUARACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5353, '430635', 'DEZESSEIS DE NOVEMBRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1438, '354323', 'RIBEIRAO DOS INDIOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2824, '270090', 'BELO MONTE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2302, '412130', 'RANCHO ALEGRE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2436, '312380', 'ENGENHEIRO NAVARRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2275, '411727', 'NOVA TEBAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3707, '431630', 'ROQUE GONZALES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2591, '170240', 'ARRAIAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (967, '291220', 'IBICOARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1946, '260670', 'IBIRAJUBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4059, '241330', 'SERRA DE SAO BENTO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3398, '150125', 'BANNACH', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2845, '313930', 'MANGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2503, '240800', 'MOSSORO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1731, '313115', 'IPABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4625, '220890', 'RIBEIRO GONCALVES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (790, '316840', 'TARUMIRIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2552, '150590', 'PORTO DE MOZ', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (137, '231080', 'PEREIRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5340, '521645', 'PEROLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2081, '431310', 'NOVA PALMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2672, '330630', 'VOLTA REDONDA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3925, '110009', 'ESPIGAO D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2722, '350940', 'CAJURU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4298, '352570', 'JOSE BONIFACIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3827, '350680', 'BOCAINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4376, '521486', 'NOVA GLORIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2105, '150470', 'MOJU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3187, '410620', 'CONTENDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3872, '315150', 'PIUMHI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4005, '421180', 'OURO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4470, '310960', 'CACHOEIRA DA PRATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4927, '500490', 'JARAGUARI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3712, '431690', 'SANTA MARIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1094, '251540', 'SERIDO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1337, '411870', 'PAULO FRONTIN', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4087, '172090', 'TAGUATINGA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2749, '355400', 'TATUI', 30);
-- Seed data for base_municipio (Brazilian municipalities), one row per INSERT.
-- Columns:
--   id        - integer primary key (values are non-sequential in this chunk).
--   codigo    - 6-digit string code; presumably the IBGE municipality code
--               (first two digits match the state prefix, e.g. '35' = SP) -- TODO confirm.
--   nome      - municipality name, stored uppercase and without accents;
--               embedded apostrophes are escaped SQL-style ('' -> '), e.g. D''OESTE.
--   estado_id - integer; presumably a foreign key into a base_estado table -- verify schema.
-- NOTE(review): this is one chunk of a larger generated dump; rows are unordered.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3263, '421770', 'SOMBRIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5509, '354580', 'SANTA BARBARA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2493, '230310', 'CARIRE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2743, '355260', 'TABAPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2161, '312735', 'GLAUCILANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3088, '261255', 'SANTA FILOMENA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2245, '411290', 'JUNDIAI DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4626, '220920', 'SANTA FILOMENA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2602, '170555', 'COMBINADO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1029, '292285', 'NOVA REDENCAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2968, '292890', 'SAO DESIDERIO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (235, '230830', 'MILAGRES', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1021, '292170', 'MORRO DO CHAPEU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (84, '230200', 'BARRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1717, '312840', 'GUARANI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5586, '120020', 'CRUZEIRO DO SUL', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1545, '316920', 'TOMBOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2934, '291750', 'JACOBINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2887, '230850', 'MOMBACA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1718, '312860', 'GUARDA-MOR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2897, '250630', 'GUARABIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3081, '261120', 'POCAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4421, '220050', 'AMARANTE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1448, '354520', 'SALTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5529, '355430', 'TEODORO SAMPAIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (831, '350925', 'CAJATI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (176, '353720', 'PEDRO DE TOLEDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2203, '330100', 'CAMPOS DOS GOYTACAZES', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1446, '354480', 'SALES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2756, '355510', 'TUPI PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5535, '355645', 'VARGEM GRANDE PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1668, '350850', 'CACAPAVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4370, '521305', 'MIMOSO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3364, '313840', 'LEOPOLDINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3690, '431447', 'PINHAL GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (240, '240020', 'ACU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (930, '420455', 'CORREIA PINTO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4040, '421640', 'SAO JOAO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4002, '421160', 'NOVA VENEZA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2048, '261420', 'SIRINHAEM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3548, '352830', 'MAGDA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2492, '230050', 'ALCANTARAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (369, '220440', 'GILBUES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3465, '351250', 'COROADOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (526, '241475', 'VENHA-VER', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3312, '430045', 'ALEGRIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (660, '110110', 'ITAPUA DO OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4393, '430367', 'CAMPESTRE DA SERRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3311, '430040', 'ALEGRETE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3714, '431710', 'SANTANA DO LIVRAMENTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1440, '354360', 'RIFAINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3000, '293350', 'WENCESLAU GUIMARAES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1994, '170388', 'CARMOLANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4117, '210312', 'CENTRAL DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4518, '432253', 'VALE DO SOL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5524, '355270', 'TABATINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3172, '314490', 'NOVA MODICA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4477, '311115', 'CAMPO AZUL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2585, '170040', 'ALMAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1786, '320050', 'APIACA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (566, '250640', 'GURINHEM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2817, '261618', 'VERTENTE DO LERIO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (686, '130110', 'CAREIRO', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2188, '317210', 'VOLTA GRANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5413, '431235', 'MONTAURI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3412, '150610', 'PRIMAVERA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (541, '250220', 'BOM JESUS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2466, '160055', 'PRACUUBA', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3870, '315070', 'PIRAJUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (323, '211400', 'ZE DOCA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4313, '353470', 'OURINHOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4311, '353330', 'NOVA LUZITANIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1667, '350710', 'BOM JESUS DOS PERDOES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1436, '354300', 'RIBEIRAO BRANCO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1770, '317052', 'URUCUIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4265, '312330', 'DORES DO TURVO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (15, '510560', 'MATUPA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (814, '350220', 'ANGATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (384, '220580', 'LUZILANDIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3238, '420895', 'JARDINOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2755, '355500', 'TUPA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2601, '170550', 'COLINAS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1915, '251335', 'SANTA INES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3579, '420130', 'ARAQUARI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5289, '520630', 'CRISTIANOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (406, '312733', 'GAMELEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1682, '353060', 'MOGI DAS CRUZES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2816, '410712', 'DIAMANTE DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (823, '350590', 'BATATAIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1517, '314915', 'PEDRAS DE MARIA DA CRUZ', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3605, '420765', 'IPORA DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (400, '310630', 'BELO ORIENTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4972, '510530', 'LUCIARA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1599, '314960', 'PEQUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1553, '320330', 'MANTENOPOLIS', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1582, '520780', 'FIRMINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (804, '330220', 'ITAPERUNA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4277, '312530', 'FARIA LEMOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4326, '510757', 'RONDOLANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3635, '421569', 'SANTIAGO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5242, '314050', 'MARTINHO CAMPOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4348, '520490', 'CAMPOS BELOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4108, '210200', 'BOM JARDIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (746, '290650', 'CANDEIAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1398, '314520', 'NOVA SERRANA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4589, '316680', 'SERRA DO SALITRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (641, '290580', 'CAMAMU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4126, '210409', 'FORMOSA DA SERRA NEGRA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2452, '270470', 'MARECHAL DEODORO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2480, '211070', 'SAO DOMINGOS DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5458, '500755', 'SANTA RITA DO PARDO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4977, '510615', 'NOVA BANDEIRANTES', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3403, '150293', 'DOM ELISEU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3079, '261080', 'PEDRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2314, '412340', 'SANTA FE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2239, '411200', 'JAGUARIAIVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2303, '412140', 'REALEZA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2284, '411880', 'PEABIRU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5321, '521308', 'MINACU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (255, '250135', 'ASSUNCAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (491, '240840', 'OLHO-D''AGUA DO BORGES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2692, '350350', 'AREIAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5469, '510779', 'SANTO ANTONIO DO LESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2244, '411275', 'JESUITAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4522, '432162', 'TRAVESSEIRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (33, '521100', 'ITAPIRAPUA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4485, '311260', 'CAPINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1047, '250870', 'MAE D''AGUA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1355, '412382', 'SANTA LUCIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1647, '430440', 'CANELA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4826, '290600', 'CAMPO FORMOSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5197, '522015', 'SAO LUIZ DO NORTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4573, '316045', 'SANTO ANTONIO DO RETIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5444, '432255', 'VANINI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4973, '510558', 'MARCELANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1143, '354290', 'RIBEIRAO BONITO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (839, '351280', 'COSMOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3557, '353010', 'MIRANDOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3897, '313655', 'JOSE RAYDAN', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (433, '352990', 'MIRACATU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1999, '171630', 'PAU D''ARCO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1671, '351450', 'DUARTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3618, '421125', 'MORRO GRANDE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (488, '240780', 'MONTE ALEGRE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4315, '353570', 'PARAISO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2533, '292590', 'QUIJINGUE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3814, '350110', 'ALTO ALEGRE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1886, '171845', 'PUGMIL', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1914, '251310', 'SALGADO DE SAO FELIX', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (401, '310730', 'BOCAIUVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (715, '150020', 'ACARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3704, '431580', 'ROCA SALES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3572, '352720', 'LORENA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3438, '170830', 'GOIANORTE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2886, '230765', 'MARACANAU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4318, '510625', 'NOVA XAVANTINA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1257, '312015', 'CRISOLITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1451, '354625', 'SANTA CRUZ DA ESPERANCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1876, '292935', 'SAO JOSE DA VITORIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (947, '290890', 'CORACAO DE MARIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2169, '313065', 'INDAIABIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (960, '291090', 'FIRMINO ALVES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2951, '292595', 'RAFAEL JAMBEIRO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (368, '220430', 'FRONTEIRAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (673, '120033', 'MANCIO LIMA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (878, '412665', 'SULINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3608, '420845', 'ITAPOA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3845, '351610', 'FLORINIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5015, '315760', 'SANTA FE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (41, '220865', 'QUEIMADA NOVA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3236, '420850', 'ITUPORANGA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1117, '270610', 'OURO BRANCO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1813, '280030', 'ARACAJU', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5378, '430890', 'GETULIO VARGAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4119, '210317', 'CENTRO NOVO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2506, '241230', 'SAO JOSE DO CAMPESTRE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2441, '312580', 'FERNANDES TOURINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4222, '210960', 'ROSARIO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4484, '311250', 'CAPIM BRANCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5527, '355385', 'TAQUARIVAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1979, '314790', 'PASSOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3863, '314850', 'PAVAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4748, '431455', 'PIRAPO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2075, '430468', 'CAPELA DE SANTANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2407, '311100', 'CAMPESTRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4219, '210880', 'PIRAPEMAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (770, '311090', 'CAMPANHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2159, '312690', 'FREI INOCENCIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1535, '316200', 'SAO GONCALO DO SAPUCAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4256, '312150', 'DESTERRO DO MELO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1764, '316940', 'TRES PONTAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (123, '230810', 'MAURITI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5061, '316440', 'SAO SEBASTIAO DA BELA VISTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1397, '314500', 'NOVA PONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5466, '510523', 'LAMBARI D''OESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5553, '500630', 'PARANAIBA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1928, '260010', 'AFOGADOS DA INGAZEIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2045, '260550', 'FERREIROS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1060, '251090', 'PAULISTA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2380, '261330', 'SAO JOAQUIM DO MONTE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (532, '250070', 'SAO JOAO DO RIO DO PEIXE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2256, '411460', 'MARECHAL CANDIDO RONDON', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2468, '170320', 'BERNARDO SAYAO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4939, '500780', 'SELVIRIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2124, '170900', 'GOIATINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1524, '315510', 'RIO DO PRADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4995, '313665', 'JUATUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2806, '410570', 'CLEVELANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3389, '130395', 'SAO SEBASTIAO DO UATUMA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3241, '421040', 'MARACAJA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5363, '430720', 'ERVAL GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2635, '330090', 'CAMBUCI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4778, '432035', 'SENTINELA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1689, '420700', 'ICARA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1211, '240230', 'CARAUBAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2078, '430970', 'HUMAITA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4834, '291040', 'ENCRUZILHADA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3943, '130068', 'BOA VISTA DO RAMOS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2894, '250100', 'ARARUNA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2322, '250523', 'CUITE DE MAMANGUAPE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3699, '431531', 'QUATRO IRMAOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5396, '431087', 'JACUIZINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2760, '355600', 'URUPES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4880, '293305', 'VARZEA DA ROCA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (556, '250470', 'CONGO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1549, '320120', 'CACHOEIRO DE ITAPEMIRIM', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5534, '355610', 'VALENTIM GENTIL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1455, '354790', 'SANTO ANTONIO DA ALEGRIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (553, '250410', 'CARRAPATEIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3546, '352770', 'LUIZIANIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4800, '280190', 'CUMBE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1181, '231126', 'QUITERIANOPOLIS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3495, '351800', 'GUARANI D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3218, '420120', 'ANTONIO CARLOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2730, '355020', 'SAO MIGUEL ARCANJO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3413, '150619', 'RUROPOLIS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3501, '351890', 'GUZOLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1567, '172120', 'TOCANTINOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1661, '330490', 'SAO GONCALO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2467, '170200', 'ARAGUACU', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2837, '270310', 'IGACI', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (920, '420370', 'CANELINHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2207, '330240', 'MACAE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (795, '320070', 'ATILIO VIVACQUA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2470, '171330', 'MIRANORTE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1596, '314875', 'PEDRA BONITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5515, '354890', 'SAO CARLOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3551, '352900', 'MARILIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4548, '500635', 'PARANHOS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3453, '351000', 'CANDIDO MOTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4284, '312675', 'FRANCISCOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4160, '210820', 'PEDREIRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1690, '420720', 'IMARUI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (111, '230590', 'IPUEIRAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1073, '251280', 'RIACHO DOS CAVALOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (609, '290010', 'ABAIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (12, '510337', 'COTRIGUACU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3708, '431643', 'SALDANHA MARINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1814, '280060', 'BARRA DOS COQUEIROS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (295, '310130', 'ALAGOA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4180, '171888', 'SANTA MARIA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (672, '120032', 'JORDAO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3986, '310780', 'BOM JESUS DO GALHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1909, '251170', 'PILOEZINHOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4344, '520393', 'BURITI DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1255, '311930', 'COROMANDEL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2957, '292660', 'RIBEIRA DO POMBAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4493, '311455', 'CARNEIRINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5251, '311220', 'CAPELA NOVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5560, '521770', 'PONTALINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5297, '520860', 'GOIANESIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2987, '293135', 'TEIXEIRA DE FREITAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (765, '293160', 'TEOLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1254, '311910', 'CORINTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5493, '500769', 'SAO GABRIEL DO OESTE', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (119, '230750', 'LAVRAS DA MANGABEIRA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5464, '510350', 'DIAMANTINO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4544, '500460', 'ITAQUIRAI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1930, '260080', 'ALTINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1966, '261380', 'SAO VICENTE FERRER', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3096, '261410', 'SERTANIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2313, '412300', 'SALTO DO LONTRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2226, '411005', 'IGUATU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2223, '410960', 'GUARATUBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2238, '411190', 'JAGUAPITA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2228, '411020', 'INACIO MARTINS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3414, '150640', 'SANTA CRUZ DO ARARI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2992, '293240', 'UIBAI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3324, '430175', 'BARAO DO TRIUNFO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1602, '314995', 'PERIQUITO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2863, '150553', 'PARAUAPEBAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1221, '240420', 'GOIANINHA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (208, '210735', 'NOVA OLINDA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2016, '431880', 'SAO LOURENCO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5073, '316600', 'SENHORA DE OLIVEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2328, '261500', 'TAQUARITINGA DO NORTE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (755, '292145', 'MIRANTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (514, '241310', 'SENADOR ELOI DE SOUZA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5543, '420245', 'BOMBINHAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1960, '261180', 'RIBEIRAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3415, '150700', 'SANTO ANTONIO DO TAUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3669, '430340', 'CAICARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3683, '431405', 'PAROBE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4671, '231390', 'URUOCA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4761, '431675', 'SANTA CLARA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3668, '430290', 'CACEQUI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (38, '220795', 'NOVA SANTA RITA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1681, '352870', 'MARABA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3728, '431850', 'SAO JOSE DO NORTE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (961, '291100', 'FLORESTA AZUL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (579, '280160', 'CEDRO DE SAO JOAO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (18, '510680', 'PORTO DOS GAUCHOS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5196, '522005', 'SAO JOAO DA PARAUNA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4510, '432195', 'TRINDADE DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2280, '411820', 'PARANAGUA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1158, '354600', 'SANTA BRANCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5240, '420890', 'JARAGUA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5480, '521060', 'ITAGUARU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4218, '210825', 'PEDRO DO ROSARIO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (198, '210130', 'BACURI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1962, '261245', 'SANTA CRUZ', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2659, '330450', 'RIO DAS FLORES', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2630, '330022', 'AREAL', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4651, '230526', 'IBARETAMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5400, '431127', 'LAGOA DOS TRES CANTOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5254, '520010', 'ABADIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4331, '510820', 'TORIXOREU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5520, '355090', 'SAO SIMAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3537, '352600', 'JUNQUEIROPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4570, '510600', 'NORTELANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4592, '170600', 'COUTO MAGALHAES', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1819, '280400', 'MARUIM', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (993, '291735', 'JABORANDI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (851, '351700', 'GETULINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1243, '311280', 'CAPITOLIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2719, '350880', 'CAFELANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3769, '353200', 'MORUNGABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3799, '291470', 'ITABERABA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4747, '431446', 'PINHAL DA SERRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (966, '291190', 'IACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4729, '431213', 'MATO CASTELHANO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1952, '260850', 'LAGOA DO ITAENGA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5545, '420940', 'LAGUNA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3026, '260070', 'ALIANCA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2599, '170410', 'CENTENARIO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3100, '261520', 'TERRA NOVA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1935, '260300', 'CABROBO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2309, '412230', 'RIO NEGRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1142, '354270', 'RESTINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4524, '432190', 'TRES PASSOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1920, '251460', 'SAO JOSE DO BONFIM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2277, '411745', 'OURO VERDE DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3523, '352280', 'ITAPORANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1437, '354320', 'RIBEIRAO DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (773, '311450', 'CARMOPOLIS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5037, '316105', 'SAO FELIX DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2056, '292060', 'MARAGOGIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4487, '311300', 'CARAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3791, '210330', 'CODO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4372, '521380', 'MORRINHOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4575, '316110', 'SAO FRANCISCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2090, '315040', 'PIEDADE DOS GERAIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5002, '313770', 'LAJINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2925, '291085', 'FILADELFIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (977, '291410', 'IPUPIARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4214, '210680', 'MIRINZAL', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2871, '210160', 'BARRA DO CORDA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4362, '521010', 'IPAMERI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4914, '500260', 'CAMAPUA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5568, '150130', 'BARCARENA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2316, '412370', 'SANTA ISABEL DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3078, '261060', 'PAUDALHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2786, '410305', 'BOA VISTA DA APARECIDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (882, '412730', 'TERRA RICA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1862, '292160', 'MORPARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4038, '421625', 'SAO JOAO DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2752, '355460', 'TIMBURI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2632, '330045', 'BELFORD ROXO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (883, '412770', 'TOLEDO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1840, '290830', 'CONCEICAO DO ALMEIDA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (744, '290620', 'CANARANA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (767, '310320', 'ARACAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1372, '314060', 'MATERLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1467, '355330', 'TAMBAU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (891, '412862', 'ALTO PARAISO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (528, '250030', 'ALAGOA GRANDE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (922, '420395', 'CAPIVARI DE BAIXO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3604, '420740', 'IMBUIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5377, '430880', 'GENERAL CAMARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2821, '270030', 'ARAPIRACA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3678, '431365', 'PALMARES DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1269, '354730', 'SANTANA DE PARNAIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4442, '220435', 'GEMINIANO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4797, '280067', 'BOQUIM', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (838, '351200', 'COLINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5290, '520660', 'CUMARI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1547, '317140', 'VIEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3931, '110145', 'PARECIS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5415, '431242', 'MORMACO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3922, '353020', 'MIRANTE DO PARANAPANEMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1176, '354910', 'SAO JOAO DA BOA VISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1683, '353170', 'MONTEIRO LOBATO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3841, '351320', 'CRISTAIS PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5098, '150497', 'NOVA IPIXUNA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (30, '520760', 'FAZENDA NOVA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (845, '351500', 'EMBU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (678, '120070', 'XAPURI', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (311, '211170', 'SAO VICENTE FERRER', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4811, '280590', 'RIACHUELO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1881, '171665', 'PEQUIZEIRO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3014, '410775', 'FIGUEIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3363, '313810', 'LASSANCE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1414, '314760', 'PASSA QUATRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (898, '420070', 'ALFREDO WAGNER', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3531, '352490', 'JAMBEIRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3843, '351470', 'ECHAPORA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3277, '430355', 'CAMARGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2758, '355560', 'UCHOA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2334, '290770', 'CHORROCHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3247, '421920', 'VIDAL RAMOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4900, '312460', 'ESTRELA DALVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (691, '130180', 'IPIXUNA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4872, '293015', 'SERRA DO RAMALHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5074, '316640', 'SERITINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2151, '230650', 'ITAPIUNA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1415, '314770', 'PASSA TEMPO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2545, '150520', 'OEIRAS DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4018, '421350', 'PORTO BELO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2565, '150747', 'SAO JOAO DE PIRABAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3441, '171200', 'LAJEADO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3630, '421460', 'RIO DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4422, '220080', 'ANTONIO ALMEIDA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1711, '312738', 'GOIANA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1948, '260750', 'ITAIBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3694, '431480', 'PORTAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2416, '311535', 'CATAS ALTAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2409, '311180', 'CANAPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2145, '230210', 'BATURITE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2170, '313120', 'IPANEMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1491, '313180', 'ITABIRINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1251, '311660', 'CLAUDIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1741, '313300', 'ITAMONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (687, '130130', 'CODAJAS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3949, '130255', 'MANAQUIRI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (757, '292450', 'PINDAI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3804, '312710', 'FRUTAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3158, '313640', 'JOAQUIM FELICIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (306, '211120', 'SAO JOSE DE RIBAMAR', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4217, '210790', 'PASSAGEM FRANCA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4418, '211245', 'TURILANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2914, '290200', 'ARACATU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5013, '313940', 'MANHUACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2965, '292820', 'SANTANA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4498, '311530', 'CATAGUASES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4956, '510300', 'CHAPADA DOS GUIMARAES', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2919, '290570', 'CAMACARI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2803, '410490', 'CASTRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (469, '412750', 'TIBAGI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1314, '411220', 'JANIOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (442, '410302', 'BOA ESPERANCA DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2971, '292920', 'SAO FRANCISCO DO CONDE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3077, '261040', 'PARNAMIRIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4751, '431510', 'PORTO XAVIER', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2209, '330300', 'MIRACEMA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3316, '430070', 'ANTA GORDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2265, '411590', 'MIRADOR', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (945, '290860', 'CONDE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1880, '171660', 'PEIXE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4406, '430511', 'CENTENARIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1702, '420910', 'JOINVILLE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4743, '431403', 'PARECI NOVO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5490, '432260', 'VENANCIO AIRES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4615, '290960', 'CRISOPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (501, '241030', 'PRESIDENTE JUSCELINO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5084, '353630', 'PATROCINIO PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5097, '150309', 'GOIANESIA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4293, '352290', 'ITAPUI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5538, '410520', 'CERRO AZUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1608, '315080', 'PIRANGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2737, '355170', 'SERTAOZINHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1768, '317030', 'UMBURATIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2606, '170730', 'DUERE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1918, '251400', 'SAO JOAO DO CARIRI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1695, '420785', 'IRATI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4295, '352390', 'ITU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4263, '312290', 'DONA EUSEBIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3606, '420775', 'IRACEMINHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3245, '421150', 'NOVA TRENTO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2520, '260765', 'ITAMBE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5398, '431115', 'JOIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2263, '411560', 'MATELANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1055, '250980', 'MULUNGU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1075, '251315', 'SANTA CECILIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3113, '351990', 'IEPE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3924, '353420', 'ORINDIUVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4555, '510100', 'ARAGUAIANA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5086, '353730', 'PENAPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2644, '330205', 'ITALVA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4411, '430545', 'CIDREIRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5159, '432160', 'TRAMANDAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2167, '313000', 'IBITURUNA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1488, '312940', 'IBERTIOGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4444, '220480', 'IPIRANGA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3365, '313865', 'LONTRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2152, '230730', 'JUAZEIRO DO NORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2077, '430805', 'FAXINALZINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2921, '290755', 'CATURAMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (707, '140010', 'BOA VISTA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1118, '270640', 'PAO DE ACUCAR', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3393, '140023', 'CAROEBE', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2496, '230760', 'LIMOEIRO DO NORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4205, '210510', 'ICATU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2544, '150500', 'NOVA TIMBOTEUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2822, '270050', 'BARRA DE SANTO ANTONIO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (769, '310760', 'BOM JESUS DA PENHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3655, '430066', 'ANDRE DA ROCHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4847, '291830', 'JITAUNA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (580, '280170', 'CRISTINAPOLIS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3402, '150275', 'CONCORDIA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3408, '150460', 'MOCAJUBA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4082, '172015', 'SAO FELIX DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4957, '510305', 'CLAUDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1068, '251250', 'QUEIMADAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2353, '240920', 'PASSAGEM', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1366, '412667', 'TAMARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2402, '310880', 'BRAUNAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2344, '311820', 'CONQUISTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1386, '314320', 'MONTE SANTO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3900, '231050', 'PEDRA BRANCA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1509, '314410', 'MUZAMBINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5094, '130070', 'BOCA DO ACRE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5264, '520160', 'ARACU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5284, '520547', 'CHAPADAO DO CEU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3803, '312610', 'FORMIGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2538, '293010', 'SENHOR DO BONFIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1586, '250440', 'CONCEICAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (631, '290390', 'BOM JESUS DA LAPA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4536, '500085', 'ANGELICA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1892, '250560', 'DIAMANTE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1307, '411010', 'IMBITUVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2805, '410560', 'CIDADE GAUCHA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3851, '412740', 'TERRA ROXA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2290, '411960', 'PITANGA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2318, '412385', 'SANTA MARIA DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3097, '261450', 'SURUBIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3210, '412240', 'ROLANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1204, '240110', 'AREIA BRANCA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4374, '521440', 'NAZARIO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4774, '431960', 'SAO SEPE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2872, '210300', 'CAXIAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2103, '150295', 'ELDORADO DOS CARAJAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1982, '130002', 'ALVARAES', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4164, '210870', 'PIO XII', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3587, '420300', 'CACADOR', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3656, '430085', 'ARAMBARE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (352, '220272', 'COCAL DOS ALVES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3682, '431402', 'PARAISO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4752, '431515', 'PROGRESSO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3275, '430250', 'BOSSOROCA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (511, '241250', 'SAO MIGUEL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5426, '431320', 'NOVA PETROPOLIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4304, '352920', 'MARTINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1782, '320010', 'AFONSO CLAUDIO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2674, '350050', 'AGUAS DE LINDOIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5544, '420730', 'IMBITUBA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3489, '351685', 'GAVIAO PEIXOTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3160, '314505', 'NOVA PORTEIRINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1168, '354760', 'SANTA ROSA DE VITERBO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3623, '421225', 'PASSO DE TORRES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4033, '421568', 'SANTA TEREZINHA DO PROGRESSO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1471, '355440', 'TERRA ROXA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (942, '420640', 'GUARACIABA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2732, '355070', 'SAO SEBASTIAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (809, '350010', 'ADAMANTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (749, '290689', 'CARAIBAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1565, '170330', 'BOM JESUS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1544, '316810', 'TAPIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1013, '292045', 'MANSIDAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (812, '350120', 'ALVARES FLORENCE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3446, '171510', 'NOVO ACORDO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (758, '292525', 'PONTO NOVO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4228, '211157', 'SAO PEDRO DOS CRENTES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3023, '260020', 'AFRANIO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2429, '312130', 'DESCOBERTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2120, '170070', 'ALVORADA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3071, '260910', 'MACHADOS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1356, '412395', 'SANTA MONICA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (385, '220590', 'MANOEL EMIDIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (7, '420360', 'CAMPOS NOVOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4996, '313670', 'JUIZ DE FORA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4568, '510520', 'JUSCIMEIRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1183, '231160', 'REDENCAO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (840, '351310', 'CRAVINHOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1576, '230140', 'ARATUBA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (576, '280100', 'CAMPO DO BRITO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2751, '355450', 'TIETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1086, '251420', 'SAO JOSE DA LAGOA TAPADA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1927, '251720', 'VIEIROPOLIS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5536, '355700', 'VOTORANTIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5137, '431417', 'PEDRAS ALTAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (241, '240090', 'ANTONIO MARTINS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3072, '260930', 'MIRANDIBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2525, '270060', 'BARRA DE SAO MIGUEL', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2782, '410260', 'BARRACAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4389, '521990', 'SAO FRANCISCO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4213, '210663', 'MATOES DO NORTE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3404, '150307', 'GARRAFAO DO NORTE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (618, '290190', 'APORA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2055, '291733', 'IUIU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (31, '520915', 'GOUVELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5266, '520260', 'AURILANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1035, '292350', 'PALMEIRAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1734, '313150', 'IPUIUNA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3899, '315490', 'RIO CASCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1395, '314460', 'NEPOMUCENO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3004, '310060', 'AGUA BOA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3887, '315765', 'SANTA HELENA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2940, '230690', 'JAGUARIBE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4319, '510629', 'PARANAITA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4323, '510710', 'SAO JOSE DOS QUATRO MARCOS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2317, '412380', 'SANTA IZABEL DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2800, '410450', 'CAPANEMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (465, '412410', 'SANTO ANTONIO DA PLATINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2193, '320280', 'ITAPEMIRIM', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (650, '110015', 'OURO PRETO DO OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (802, '330040', 'BARRA MANSA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2724, '350970', 'CAMPOS DO JORDAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (183, '353870', 'PIRACICABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4526, '432230', 'TUPARENDI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2217, '410860', 'GOIOERE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4183, '172085', 'SUCUPIRA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3287, '320460', 'SANTA TERESA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3181, '520420', 'CACHOEIRA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2771, '410050', 'ALTONIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (509, '241190', 'SAO FRANCISCO DO OESTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4528, '432250', 'VACARIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4012, '421260', 'PERITIBA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2691, '350340', 'AREALVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4142, '210594', 'LAGO DOS RODRIGUES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (731, '150280', 'CURRALINHO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5364, '430730', 'ERVAL SECO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1281, '410322', 'BOM SUCESSO DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4785, '432149', 'TOROPI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (590, '280410', 'MOITA BONITA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3729, '431860', 'SAO JOSE DO OURO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3313, '430055', 'ALTO ALEGRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4890, '310280', 'ANDRELANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4723, '431125', 'LAGOAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (867, '412530', 'SAO JORGE DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1389, '314370', 'MORRO DO PILAR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5157, '432090', 'TAPEJARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4842, '291540', 'ITAJU DO COLONIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3837, '351100', 'CASTILHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2213, '330500', 'SAO JOAO DA BARRA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1753, '313545', 'JENIPAPO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5, '420253', 'BOM JESUS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2608, '170755', 'FATIMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1623, '315360', 'PRUDENTE DE MORAIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3638, '421650', 'SAO JOAQUIM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4027, '421505', 'RIO RUFINO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (968, '291230', 'IBICUI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (593, '280450', 'NOSSA SENHORA DA GLORIA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (786, '313290', 'ITAMOGI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (106, '230533', 'IBICUITINGA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5585, '110100', 'GOVERNADOR JORGE TEIXEIRA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1499, '313695', 'JUVENILIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1032, '292310', 'OLINDINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (907, '420208', 'BANDEIRANTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2463, '270920', 'TRAIPU', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5590, '241105', 'TIBAU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2259, '411520', 'MARINGA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1640, '315660', 'RUBIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2553, '150600', 'PRAINHA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2899, '251140', 'PICUI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5044, '316190', 'SAO GONCALO DO RIO ABAIXO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4514, '432232', 'TURUCU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2153, '230800', 'MASSAPE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1562, '150120', 'BAIAO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5146, '431650', 'SALVADOR DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4706, '430820', 'FLORES DA CUNHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2931, '291520', 'ITAGIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3170, '352700', 'LINDOIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4803, '280300', 'ITABAIANINHA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (764, '293040', 'SERRA PRETA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4452, '220620', 'MIGUEL ALVES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2833, '270260', 'FEIRA GRANDE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (473, '420150', 'ARMAZEM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (915, '420287', 'BRUNOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2915, '290280', 'BARRA DA ESTIVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4384, '521890', 'RUBIATABA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1610, '315120', 'PIRAPORA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4483, '311240', 'CAPETINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1751, '313490', 'JACUTINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1492, '313240', 'ITAJUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5009, '313870', 'LUMINARIAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4644, '230220', 'BEBERIBE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4979, '510620', 'NOVA BRASILANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5170, '510704', 'PRIMAVERA DO LESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4624, '510805', 'TERRA NOVA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4558, '510260', 'CAMPINAPOLIS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2116, '150812', 'ULIANOPOLIS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3175, '260400', 'CARPINA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5506, '316370', 'SAO LOURENCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2366, '251080', 'PATOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2219, '410880', 'GUAIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5561, '522160', 'URUACU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1300, '410845', 'FOZ DO JORDAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3008, '410725', 'DOURADINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3450, '171840', 'PRESIDENTE KENNEDY', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3180, '430185', 'BARRA DO GUARITA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (364, '220375', 'FARTURA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2112, '150635', 'SANTA BARBARA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1252, '311740', 'CONCEICAO DE IPANEMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2778, '410150', 'ARAPONGAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5178, '510775', 'SALTO DO CEU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4402, '430471', 'CARAA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4597, '220173', 'BETANIA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (81, '230160', 'ASSARE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2798, '410430', 'CAMPO MOURAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (598, '280550', 'POCO VERDE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3862, '314795', 'PATIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4184, '172093', 'TAIPAS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2253, '411410', 'MANDAGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5047, '316245', 'SAO JOAO DAS MISSOES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3124, '410820', 'FORMOSA DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2498, '231090', 'PIQUET CARNEIRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1530, '315940', 'SANTA RITA DE IBITIPOCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2622, '171395', 'MURICILANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (332, '220117', 'BARRA D''ALCANTARA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3853, '412795', 'TUPASSI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5085, '353690', 'PEDRANOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5078, '316695', 'SERRANOPOLIS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (479, '240590', 'JOAO DIAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3940, '120080', 'PORTO ACRE', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5143, '430540', 'CHIAPETTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5045, '316225', 'SAO JOAO DA LAGOA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3396, '150040', 'ALENQUER', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (729, '150250', 'CHAVES', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4713, '430965', 'HULHA NEGRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (657, '110070', 'CAMPO NOVO DE RONDONIA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3436, '170720', 'DOIS IRMAOS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3679, '431380', 'PALMITINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5499, '314310', 'MONTE CARMELO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1207, '240180', 'BREJINHO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3564, '352320', 'ITARARE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1390, '314380', 'MUNHOZ', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (716, '150030', 'AFUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3797, '290680', 'CANSANCAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1755, '313570', 'JEQUITIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1014, '292050', 'MARACAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3658, '430120', 'ARROIO DO TIGRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2437, '312430', 'ESPINOSA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3162, '315440', 'RESSAQUINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (313, '211176', 'SENADOR LA ROCQUE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3771, '353260', 'NHANDEARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2396, '280020', 'AQUIDABA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3483, '351550', 'FERNANDOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4371, '521340', 'MOIPORA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1820, '280460', 'NOSSA SENHORA DAS DORES', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4690, '240620', 'LAGOA D''ANTA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1727, '313050', 'ILICINEA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (597, '280530', 'PIRAMBU', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (434, '353370', 'OCAUCU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4078, '171886', 'SANTA FE DO ARAGUAIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2355, '241220', 'SAO JOSE DE MIPIBU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1837, '290590', 'CAMPO ALEGRE DE LOURDES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (95, '230420', 'CRATO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1478, '355660', 'VERA CRUZ', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3578, '420110', 'ANITAPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2990, '293190', 'TUCANO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3627, '421340', 'PONTE SERRADA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2763, '355640', 'VARGEM GRANDE DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5211, '522190', 'VARJAO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2019, '510279', 'CARLINDA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3220, '420209', 'BARRA BONITA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5329, '521483', 'NOVA CRIXAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2146, '230350', 'CASCAVEL', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2854, '130160', 'FONTE BOA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3388, '314640', 'PAINEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (86, '230230', 'BELA CRUZ', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (980, '291465', 'ITABELA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4588, '316630', 'SERICITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (793, '317130', 'VICOSA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2518, '260510', 'CUSTODIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3067, '260860', 'LAGOA DO OURO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1603, '315000', 'PESCADOR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (394, '220700', 'OEIRAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5333, '521530', 'ORIZONA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2864, '150613', 'REDENCAO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2066, '410350', 'CALIFORNIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2220, '410895', 'GUAMIRANGA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1365, '412640', 'SERTANEJA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1298, '410780', 'FLORAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (351, '220270', 'COCAL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (655, '110045', 'BURITIS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3257, '421560', 'SANTA ROSA DE LIMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (582, '280240', 'GARARU', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4299, '352660', 'LAVRINHAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5066, '316510', 'SAO TOMAS DE AQUINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3212, '412480', 'SAO JOAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4983, '110011', 'JARU', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4610, '260845', 'LAGOA DO CARRO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1796, '320245', 'IBATIBA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2858, '150190', 'BUJARU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (145, '353180', 'MONTE MOR', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4622, '312910', 'GURINHATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3915, '351770', 'GUARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5350, '430605', 'CRISTAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (534, '250090', 'ARARA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2453, '270500', 'MATA GRANDE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5443, '432252', 'VALE VERDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1433, '354240', 'REGENTE FEIJO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3720, '431775', 'SANTO ANTONIO DO PLANALTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4760, '431660', 'SANANDUVA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2587, '170110', 'APARECIDA DO RIO NEGRO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1780, '317190', 'VIRGOLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2181, '316970', 'TURMALINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2952, '292600', 'REMANSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2708, '350650', 'BIRIGUI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1417, '314810', 'PATROCINIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2960, '292700', 'RIO REAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1194, '231360', 'UBAJARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4542, '500350', 'DOURADINA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3828, '350715', 'BOM SUCESSO DE ITARARE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3937, '120035', 'MARECHAL THAUMATURGO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3171, '431440', 'PELOTAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (508, '241170', 'SAO BENTO DO TRAIRI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2200, '320520', 'VILA VELHA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (338, '220191', 'BOM PRINCIPIO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3594, '420470', 'CUNHA PORA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1412, '314710', 'PARA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3095, '261400', 'SERRITA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (547, '250300', 'CAAPORA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1911, '251240', 'PUXINANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4460, '220790', 'PEDRO II', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3733, '431900', 'SAO MARCOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4159, '210810', 'PAULO RAMOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3053, '260580', 'FREI MIGUELINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (745, '290640', 'CANDEAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (756, '292275', 'NOVA IBIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5401, '431130', 'LAGOA VERMELHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (859, '352042', 'ILHA COMPRIDA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1887, '171850', 'RECURSOLANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5374, '430845', 'FORTALEZA DOS VALOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2860, '150330', 'IGARAPE-MIRI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5386, '430995', 'IBIRAPUITA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5041, '316165', 'SAO GERALDO DO BAIXIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4410, '430537', 'CHARRUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2183, '317043', 'UNIAO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3647, '421890', 'URUBICI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1724, '313005', 'ICARAI DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2696, '350430', 'AVAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (10, '510190', 'BRASNORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4154, '210730', 'NOVA IORQUE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (973, '291345', 'IGRAPIUNA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5479, '520960', 'HEITORAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3794, '210780', 'PARNARAMA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4098, '210087', 'ARAGUANA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3713, '431695', 'SANTA MARIA DO HERVAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5174, '510726', 'SANTO AFONSO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4028, '421510', 'RODEIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4464, '310855', 'BRASILANDIA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5184, '521900', 'SANCLERLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5287, '520580', 'CORUMBA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4829, '290730', 'CASTRO ALVES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3105, '316990', 'UBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1003, '291890', 'LAJEDAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5497, '313780', 'LAMBARI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3898, '314470', 'NOVA ERA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2499, '231240', 'SAO GONCALO DO AMARANTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4667, '231195', 'SALITRE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5255, '520013', 'ACREUNA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4489, '311340', 'CARATINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4951, '510185', 'BOM JESUS DO ARAGUAIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1081, '251380', 'SANTA TERESINHA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4862, '292460', 'PINDOBACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2815, '410700', 'CURIUVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5546, '421480', 'RIO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (265, '250940', 'MOGEIRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (283, '261010', 'PALMEIRINA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4145, '210635', 'MARAJA DO SENA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (890, '412855', 'VERA CRUZ DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3197, '411330', 'LARANJEIRAS DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3300, '421880', 'TURVO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4655, '230680', 'JAGUARIBARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3248, '421223', 'PARAISO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4688, '240560', 'JARDIM DE PIRANHAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2873, '210360', 'COROATA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4, '330455', 'Rio de Janeiro', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3444, '171360', 'MONTE DO CARMO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4261, '312245', 'DIVISOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5368, '430781', 'ESTRELA VELHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1494, '313450', 'ITUTINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (204, '210515', 'IGARAPE DO MEIO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3964, '310340', 'ARACUAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3927, '110040', 'ALTO PARAISO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2024, '313700', 'LADAINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1716, '312830', 'GUARANESIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1096, '251580', 'SERRA REDONDA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2900, '251530', 'SAPE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1635, '315590', 'RIO PRETO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3756, '432140', 'TENENTE PORTELA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1106, '270420', 'LIMOEIRO DE ANADIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4130, '210460', 'GOVERNADOR EUGENIO BARROS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (151, '353282', 'NOVA CAMPINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4249, '312030', 'CRISTALIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3600, '420600', 'GOVERNADOR CELSO RAMOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2231, '411065', 'IRACEMA DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3291, '320506', 'VENDA NOVA DO IMIGRANTE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1992, '150195', 'CACHOEIRA DO PIRIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3695, '431500', 'PORTO LUCENA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1715, '312820', 'GUARACIABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3232, '420690', 'IBIRAMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (702, '130400', 'SILVES', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3760, '432166', 'TRES CACHOEIRAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (114, '230670', 'JAGUARETAMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2985, '293120', 'TAPEROA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5432, '430517', 'CERRO GRANDE DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4238, '311860', 'CONTAGEM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3279, '430435', 'CANDIOTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2680, '350140', 'ALVARO DE CARVALHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2687, '350275', 'ARACARIGUAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3028, '260105', 'ARACOIABA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1571, '211223', 'TRIZIDELA DO VALE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3822, '350450', 'AVARE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4118, '210315', 'CENTRO DO GUILHERME', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3186, '410380', 'CAMBIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2064, '330550', 'SAQUAREMA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2592, '170255', 'AUGUSTINOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (120, '230770', 'MARANGUAPE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1664, '350260', 'APARECIDA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2210, '330380', 'PARATI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4111, '210215', 'BREJO DE AREIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3319, '430100', 'ARROIO DO MEIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (321, '211270', 'VARGEM GRANDE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1069, '251260', 'QUIXABA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2605, '170710', 'DIVINOPOLIS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4740, '431344', 'NOVO TIRADENTES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3332, '430230', 'BOM JESUS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3084, '261190', 'RIO FORMOSO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3458, '351080', 'CASA BRANCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1651, '330580', 'TERESOPOLIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1614, '315210', 'PONTE NOVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2846, '314610', 'OURO PRETO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3573, '352750', 'LUCIANOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5315, '521210', 'JOVIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2129, '210405', 'ESTREITO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5339, '521640', 'PARAUNA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5081, '316760', 'SIMONESIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2345, '312200', 'DIVINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5503, '315720', 'SANTA BARBARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3790, '210232', 'BURITICUPU', 14);
-- Seed data for table base_municipio (Brazilian municipalities).
-- Columns: id        — surrogate primary key (integer, not sequential in this dump);
--          codigo    — 6-digit IBGE municipality code, stored as a string
--                      (first 2 digits are the IBGE state code);
--          nome      — municipality name, uppercase, accents stripped;
--                      a literal apostrophe is escaped as '' (e.g. D''OESTE);
--          estado_id — foreign key into the states table (presumably base_estado —
--                      TODO confirm against the schema earlier in this dump).
-- Rows appear in no particular order; this is one chunk of a larger dump.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4538, '500215', 'BODOQUENA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4963, '510345', 'DENISE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2373, '260190', 'BEZERROS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3099, '261480', 'TACARATU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3048, '260515', 'DORMENTES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2781, '410240', 'BANDEIRANTES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1974, '317020', 'UBERLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2287, '411920', 'PINHALAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2681, '350160', 'AMERICANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (422, '330187', 'IGUABA GRANDE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2655, '330410', 'PORCIUNCULA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4175, '211030', 'SANTO ANTONIO DOS LOPES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (997, '291800', 'JEQUIE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2715, '350760', 'BRAGANCA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3938, '120042', 'RODRIGUES ALVES', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (278, '260490', 'CUMARU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5129, '431220', 'MAXIMILIANO DE ALMEIDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (424, '330470', 'SANTO ANTONIO DE PADUA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5367, '430780', 'ESTRELA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3065, '260810', 'JOAO ALFREDO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5138, '431449', 'PINHEIRINHO DO VALE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4503, '311590', 'CHACARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4798, '280110', 'CANHOBA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4056, '241240', 'SAO JOSE DO SERIDO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4013, '421270', 'PETROLANDIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4999, '313720', 'LAGOA DA PRATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (496, '240933', 'SANTA MARIA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4856, '292225', 'MUQUEM DE SAO FRANCISCO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4336, '520050', 'ALOANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3622, '421205', 'PALMEIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3082, '261140', 'PRIMAVERA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1625, '315380', 'QUELUZITO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4576, '316160', 'SAO GERALDO DA PIEDADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3908, '310560', 'BARBACENA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2908, '270430', 'MACEIO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (513, '241290', 'SAO TOME', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1869, '292540', 'POTIRAGUA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3626, '421315', 'PLANALTO ALEGRE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4193, '210197', 'BOA VISTA DO GURUPI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3252, '421360', 'PORTO UNIAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4595, '210690', 'MONCAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3111, '350820', 'BURITIZAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3479, '351510', 'EMBU-GUACU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (76, '230075', 'AMONTADA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (798, '320313', 'JOAO NEIVA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2440, '312550', 'SAO GONCALO DO RIO PRETO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4443, '220455', 'GUARIBAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2777, '410140', 'APUCARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4614, '280740', 'TOBIAS BARRETO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (933, '420517', 'ENTRE RIOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3273, '430205', 'BENJAMIN CONSTANT DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5302, '520945', 'GUARINOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (871, '412575', 'SAO PEDRO DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3121, '355550', 'UBIRAJARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5414, '431238', 'MONTE BELO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (860, '412400', 'SANTANA DO ITARARE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2039, '230520', 'HIDROLANDIA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1449, '354560', 'SANTA ADELIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2212, '330460', 'SANTA MARIA MADALENA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2880, '211150', 'SAO MATEUS DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2346, '312595', 'FERVEDOURO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3250, '421310', 'PIRATUBA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (393, '220695', 'NOVO SANTO ANTONIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4784, '432145', 'TEUTONIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2131, '210760', 'PALMEIRANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2516, '260100', 'ANGELIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3653, '430050', 'ALPESTRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3178, '354805', 'SANTO ANTONIO DO ARACANGUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4599, '220915', 'SANTA CRUZ DOS MILAGRES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1723, '312965', 'IBIRACATU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4755, '431550', 'RESTINGA SECA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (214, '220090', 'AROAZES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4417, '211227', 'TUFILANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4814, '280730', 'TELHA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1584, '150180', 'BREVES', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (956, '291070', 'EUCLIDES DA CUNHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1347, '412135', 'RANCHO ALEGRE D''OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (56, '221005', 'SAO JOSE DO DIVINO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1851, '291390', 'IPIAU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4645, '230240', 'BOA VIAGEM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1026, '292230', 'MURITIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3169, '310120', 'AIURUOCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1745, '313380', 'ITAUNA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1744, '313370', 'ITATIAIUCU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1007, '291950', 'LIVRAMENTO DE NOSSA SENHORA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3885, '315680', 'SABINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2390, '270630', 'PALMEIRA DOS INDIOS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4942, '500830', 'TRES LAGOAS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4429, '220207', 'CAJAZEIRAS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3064, '260805', 'JATOBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3033, '260210', 'BOM CONSELHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (991, '291700', 'ITIUBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3202, '411725', 'NOVA PRATA DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (877, '412650', 'SERTANOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2299, '412090', 'QUEDAS DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1426, '354085', 'PRACINHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1456, '354850', 'SANTOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1532, '315900', 'SANTANA DO RIACHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4258, '312190', 'DIVINESIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2670, '330600', 'TRES RIOS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5447, '432345', 'VILA NOVA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3356, '510454', 'ITANHANGA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1296, '410753', 'ENTRE RIOS DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2312, '412290', 'SALTO DO ITARARE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5158, '432150', 'TORRES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4023, '421430', 'RANCHO QUEIMADO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5431, '430500', 'CATUIPE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3723, '431805', 'SAO DOMINGOS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (133, '231000', 'PALHANO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1774, '317090', 'VARZELANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5160, '432170', 'TRES COROAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (43, '220885', 'RIACHO FRIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4096, '210060', 'AMARANTE DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3674, '431339', 'NOVO CABRAIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1995, '170560', 'CONCEICAO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (227, '221037', 'SAO LUIS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3681, '431400', 'PARAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3018, '410840', 'FRANCISCO BELTRAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1012, '292040', 'MANOEL VITORINO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (6, '420310', 'CAIBI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5423, '431295', 'NOVA BOA VISTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2276, '411740', 'OURIZONA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (168, '353590', 'PARANAPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (49, '220945', 'SANTO ANTONIO DOS MILAGRES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (698, '130310', 'NOVA OLINDA DO NORTE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2279, '411810', 'PARANACITY', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2198, '320495', 'SAO ROQUE DO CANAA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2010, '412320', 'SANTA CECILIA DO PAVAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (879, '412680', 'TAPEJARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2910, '270860', 'SAO MIGUEL DOS CAMPOS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3439, '170980', 'IPUEIRAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (527, '250020', 'AGUIAR', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3547, '352790', 'LUTECIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1463, '355160', 'SERRA NEGRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (914, '420270', 'BOTUVERA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3222, '420290', 'BRUSQUE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2074, '430265', 'BROCHIER', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4497, '311520', 'CONCEICAO DA BARRA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3429, '170305', 'BANDEIRANTES DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5537, '355730', 'ESTIVA GERBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5338, '521590', 'PALMINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5355, '430642', 'DOIS IRMAOS DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4003, '421165', 'NOVO HORIZONTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (147, '353210', 'MURUTINGA DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5147, '431700', 'SANTANA DA BOA VISTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (689, '130165', 'GUAJARA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2323, '251050', 'OLIVEDOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3482, '351535', 'EUCLIDES DA CUNHA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1885, '171820', 'PORTO NACIONAL', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1442, '354390', 'RIO CLARO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3889, '315895', 'SANTANA DO PARAISO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5030, '316010', 'SANTO ANTONIO DO GRAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1864, '292250', 'NAZARE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (134, '231020', 'PARACURU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1011, '292010', 'MAIRI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5206, '522140', 'TRINDADE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2397, '310690', 'BICAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5054, '316320', 'SAO JOSE DO ALEGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1980, '315220', 'PORTEIRINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4240, '311880', 'CORACAO DE JESUS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3806, '317010', 'UBERABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3808, '317200', 'VISCONDE DO RIO BRANCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1077, '251330', 'SANTA HELENA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1968, '261460', 'TABIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5563, '260680', 'IGARASSU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1364, '412630', 'SENGES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (276, '260120', 'ARCOVERDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5179, '510776', 'SANTA RITA DO TRIVELATO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2693, '350380', 'ARTUR NOGUEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2660, '330452', 'RIO DAS OSTRAS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3839, '351210', 'COLOMBIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2714, '350750', 'BOTUCATU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (67, '221100', 'TERESINA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (217, '220245', 'CAPITAO GERVASIO OLIVEIRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4746, '431442', 'PICADA CAFE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5326, '521405', 'MUNDO NOVO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5194, '521980', 'SAO DOMINGOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (301, '211085', 'SAO FRANCISCO DO BREJAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1580, '240650', 'LAGOA NOVA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (712, '140050', 'SAO JOAO DA BALIZA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2615, '171180', 'JUARINA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3992, '421003', 'LUZERNA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2810, '410650', 'CORONEL VIVIDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1016, '292080', 'MARCIONILIO SOUZA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2029, '110029', 'SANTA LUZIA D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (721, '150145', 'BELTERRA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3763, '352910', 'MARINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (704, '130423', 'TONANTINS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4653, '230620', 'ITAICABA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3109, '330620', 'VASSOURAS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4036, '421600', 'SAO CARLOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4772, '431920', 'SAO NICOLAU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (680, '130010', 'ANORI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5508, '354540', 'SALTO GRANDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1104, '270360', 'JAPARATINGA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3536, '412580', 'SAO PEDRO DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4288, '352020', 'IGARATA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3272, '430107', 'ARROIO DO PADRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3612, '420980', 'LEOBERTO LEAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4173, '211020', 'SANTA RITA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4103, '210135', 'BACURITUBA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3810, '320110', 'BOM JESUS DO NORTE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2596, '170360', 'BRASILANDIA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5425, '431303', 'NOVA ESPERANCA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5517, '354960', 'SAO JOSE DO BARREIRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2184, '317060', 'VARGEM BONITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5409, '431205', 'MARQUES DE SOUZA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1888, '171870', 'RIO DOS BOIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2168, '313020', 'IGARATINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3688, '431430', 'PEJUCARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5020, '315830', 'SANTANA DA VARGEM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1292, '410655', 'CORUMBATAI DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5547, '421620', 'SAO FRANCISCO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3538, '352620', 'JUQUITIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3280, '430467', 'CAPIVARI DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2675, '350070', 'AGUDOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3857, '412880', 'XAMBRE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3068, '260870', 'LAGOA DOS GATOS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3645, '421840', 'TREZE DE MAIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1922, '251520', 'SAO SEBASTIAO DO UMBUZEIRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2658, '330440', 'RIO CLARO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5100, '150835', 'VITORIA DO XINGU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2308, '412215', 'RIO BONITO DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4730, '431225', 'MINAS DO LEAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2400, '310825', 'BONITO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3779, '353600', 'PARAPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (421, '330080', 'CACHOEIRAS DE MACACU', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2171, '313160', 'IRAI DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3132, '420985', 'LINDOIA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2156, '231135', 'QUIXELO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2060, '316960', 'TUPACIGUARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3939, '120050', 'SENA MADUREIRA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (932, '420515', 'DOUTOR PEDRINHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3258, '421570', 'SANTO AMARO DA IMPERATRIZ', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2405, '311010', 'CAIANA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1400, '314540', 'OLARIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4167, '210927', 'PRESIDENTE SARNEY', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (826, '350700', 'BOITUVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2706, '350635', 'BERTIOGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4179, '171875', 'RIO SONO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5108, '420930', 'LAGES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4825, '290515', 'CAETANOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2862, '150490', 'MUANA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3359, '313657', 'JOSENOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4813, '280700', 'SAO MIGUEL DO ALEIXO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4889, '310240', 'ALVORADA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3878, '315400', 'RAUL SOARES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1620, '315300', 'PRATINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5245, '315060', 'PIRACEMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1041, '250740', 'JERICO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1074, '251290', 'RIO TINTO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1330, '411680', 'NOVA CANTU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3032, '260170', 'BELO JARDIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1134, '354140', 'PRESIDENTE PRUDENTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1454, '354780', 'SANTO ANDRE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2704, '350600', 'BAURU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1480, '410180', 'ARAUCARIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1590, '330390', 'PETROPOLIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4296, '352480', 'JALES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3467, '351300', 'COTIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2192, '320260', 'ICONHA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2189, '320035', 'ALTO RIO NOVO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5012, '313925', 'MAMONAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4030, '421550', 'SANTA CECILIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1785, '320030', 'ALFREDO CHAVES', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3985, '310720', 'BOCAINA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1577, '230490', 'GROAIRAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (284, '261210', 'SALGADINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2640, '330150', 'CORDEIRO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2517, '260240', 'BREJAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2590, '170215', 'ARAGUANA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4719, '431060', 'ITAQUI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2401, '310850', 'BOTUMIRIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (870, '412560', 'SAO MATEUS DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3539, '352630', 'LAGOINHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1341, '411990', 'PONTA GROSSA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (121, '230780', 'MARCO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3454, '351020', 'CAPAO BONITO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3593, '420450', 'CORUPA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (708, '140017', 'CANTA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3568, '352510', 'JARDINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2688, '350290', 'ARACOIABA DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5533, '355580', 'URANIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1464, '355210', 'SOCORRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3173, '210080', 'ANAPURUS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1674, '351660', 'GALIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1658, '330370', 'PARAIBA DO SUL', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (370, '220450', 'GUADALUPE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2118, '160005', 'SERRA DO NAVIO', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3217, '420020', 'AGROLANDIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5267, '520310', 'BALIZA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1504, '314085', 'MATIAS CARDOSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5555, '510637', 'PEDRA PRETA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1033, '292330', 'OURICANGAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3603, '420710', 'ILHOTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4324, '510719', 'RIBEIRAOZINHO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5105, '354990', 'SAO JOSE DOS CAMPOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2909, '270670', 'PENEDO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1496, '313560', 'JEQUITAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3732, '431890', 'SAO LUIZ GONZAGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (144, '353160', 'MONTE CASTELO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4683, '240410', 'GALINHOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4788, '510835', 'VALE DE SAO DOMINGOS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4745, '431415', 'PAVERAMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5358, '430680', 'ENCANTADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4967, '510450', 'INDIAVAI', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2989, '293180', 'TREMEDAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3306, '421940', 'WITMARSUM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3074, '260970', 'OROBO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1822, '280560', 'PORTO DA FOLHA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5383, '430957', 'HERVEIRAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3971, '310450', 'ARINOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (614, '290120', 'ANAGE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5003, '313790', 'LAMIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2566, '150750', 'SAO JOAO DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1570, '210990', 'SANTA INES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5351, '430613', 'CRUZALTENSE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3044, '260420', 'CATENDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1531, '315860', 'SANTANA DO DESERTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1940, '260470', 'CORRENTES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3512, '352110', 'IPEUNA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3329, '430215', 'BOA VISTA DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3696, '431513', 'POUSO NOVO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2393, '270730', 'PORTO CALVO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5496, '521015', 'IPIRANGA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1874, '292830', 'SANTANOPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4373, '521400', 'MOZARLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5305, '520995', 'INDIARA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (998, '291820', 'JIQUIRICA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1244, '311320', 'CARANDAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2434, '312320', 'DORES DO INDAIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1801, '320305', 'JAGUARE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (639, '290540', 'CAIRU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (776, '311840', 'CONSELHEIRO PENA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1518, '315130', 'PIRAUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1000, '291840', 'JUAZEIRO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1719, '312870', 'GUAXUPE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4789, '510850', 'VERA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (743, '150390', 'JURUTI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3798, '291060', 'ESPLANADA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1898, '250800', 'JURU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5231, '291690', 'ITIRUCU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1061, '251120', 'PEDRAS DE FOGO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1088, '251450', 'SAO JOSE DE PIRANHAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2368, '251210', 'POMBAL', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (568, '250670', 'IMACULADA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2033, '220190', 'BOM JESUS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (987, '291640', 'ITAPETINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2236, '411155', 'IVATE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2274, '411722', 'NOVA SANTA ROSA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1301, '410870', 'GRANDES RIOS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4074, '250330', 'CACHOEIRA DOS INDIOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1510, '314465', 'NINHEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3642, '421775', 'SUL BRASIL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5369, '430786', 'FAGUNDES VARELA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2958, '292665', 'RIBEIRAO DO LARGO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2904, '261090', 'PESQUEIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2543, '150450', 'MELGACO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4762, '431697', 'SANTA MARGARIDA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (719, '150090', 'AUGUSTO CORREA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4873, '293050', 'SERRINHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2459, '270740', 'PORTO DE PEDRAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4116, '210290', 'CARUTAPERA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2325, '260290', 'CABO DE SANTO AGOSTINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (866, '412510', 'SAO JOAO DO TRIUNFO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2008, '411490', 'MARILANDIA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5437, '430590', 'CORONEL BICACO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2319, '412390', 'SANTA MARIANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5192, '521973', 'SANTO ANTONIO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4727, '431175', 'MANOEL VIANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (212, '211167', 'SAO ROBERTO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2747, '355380', 'TAQUARITUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3795, '211220', 'TIMON', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1002, '291855', 'JUSSARI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4237, '311830', 'CONSELHEIRO LAFAIETE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1136, '354170', 'QUATA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2703, '350560', 'BARRINHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1458, '354930', 'SAO JOAO DO PAU D''ALHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (646, '110006', 'COLORADO DO OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (485, '240725', 'MAJOR SALES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3310, '430020', 'AJURICABA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (837, '351140', 'CERQUEIRA CESAR', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3736, '431935', 'SAO PEDRO DA SERRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2866, '150745', 'SAO GERALDO DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3812, '350020', 'ADOLFO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1729, '313070', 'INDIANOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2020, '510774', 'SANTA CRUZ DO XINGU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5384, '430960', 'HORIZONTINA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (170, '353620', 'PARIQUERA-ACU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4221, '210945', 'RAPOSA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3588, '420325', 'CAPAO ALTO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2479, '210905', 'PORTO RICO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3485, '351580', 'FLORA RICA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (904, '420170', 'ASCURRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2388, '270380', 'JOAQUIM GOMES', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1788, '320100', 'BOA ESPERANCA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4267, '312350', 'DOURADOQUARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (852, '351750', 'GUAPIACU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2442, '312660', 'FRANCISCO DUMONT', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4359, '520920', 'GUAPO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4820, '290270', 'BARRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3996, '421060', 'MASSARANDUBA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3907, '292840', 'SANTA RITA DE CASSIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3528, '352440', 'JACAREI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4201, '210408', 'FERNANDO FALCAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2164, '312825', 'GUARACIAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (536, '250150', 'BANANEIRAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (573, '280040', 'ARAUA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (726, '150210', 'CAMETA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (510, '241210', 'SAO JOAO DO SABUGI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1082, '251385', 'SANTO ANDRE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5406, '431177', 'MAQUINE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3701, '431545', 'RELVADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2969, '292895', 'SAO DOMINGOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4122, '210355', 'CONCEICAO DO LAGO-ACU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1005, '291905', 'LAJEDO DO TABOCAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1993, '150770', 'SAO SEBASTIAO DA BOA VISTA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5259, '520080', 'ALVORADA DO NORTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5198, '522020', 'SAO MIGUEL DO ARAGUAIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (104, '230523', 'HORIZONTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4176, '211040', 'SAO BENEDITO DO RIO PRETO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4165, '210910', 'PRESIDENTE DUTRA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3987, '310800', 'BOM SUCESSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2403, '310940', 'BURITIZEIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2417, '311580', 'CENTRALINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1189, '231290', 'SOBRAL', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1519, '315140', 'PITANGUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1747, '313430', 'ITUMIRIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4545, '500500', 'JARDIM', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4923, '500440', 'INOCENCIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4954, '510268', 'CAMPOS DE JULIO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2929, '291360', 'ILHEUS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1340, '411950', 'PIRAQUARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3253, '421380', 'PRAIA GRANDE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2963, '292770', 'SANTA CRUZ CABRALIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3511, '352100', 'IPERO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3554, '352950', 'MENDONCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2286, '411910', 'PIEN', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5079, '316710', 'SERRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5463, '510263', 'CAMPO NOVO DO PARECIS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (299, '310480', 'AUGUSTO DE LIMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3144, '432120', 'TAQUARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1563, '150495', 'NOVA ESPERANCA DO PIRIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1308, '411040', 'INDIANOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3999, '421105', 'MONTE CARLO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2477, '210598', 'LAJEADO NOVO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (713, '150010', 'ABAETETUBA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1359, '412490', 'SAO JOAO DO CAIUA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1498, '313610', 'JOANESIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1868, '292470', 'PIRIPA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3034, '260230', 'BONITO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5539, '410830', 'FOZ DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1089, '251455', 'SAO JOSE DE PRINCESA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2093, '316610', 'SENHORA DO PORTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3890, '315930', 'SANTA RITA DE JACUTINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3892, '316350', 'SAO JOSE DO JACURI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1050, '250910', 'MARI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (202, '210390', 'DUQUE BACELAR', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2054, '291140', 'GLORIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2136, '220170', 'BERTOLINIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3031, '260160', 'BELEM DE SAO FRANCISCO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (203, '210440', 'GONCALVES DIAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4327, '510777', 'SANTA TEREZINHA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3952, '130356', 'RIO PRETO DA EVA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3282, '430513', 'CERRO BRANCO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3754, '432130', 'TAQUARI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (875, '412627', 'SAUDADE DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4766, '431810', 'SAO FRANCISCO DE ASSIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1601, '314990', 'PERDOES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4534, '432375', 'VITORIA DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3145, '432335', 'VILA LANGARO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3710, '431670', 'SANTA BARBARA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1686, '353650', 'PAULINIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3502, '351905', 'HOLAMBRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2031, '210930', 'PRESIDENTE VARGAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (571, '270930', 'UNIAO DOS PALMARES', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1505, '314150', 'MENDES PIMENTEL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (876, '412635', 'SERRANOPOLIS DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5313, '521180', 'JARAGUA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4767, '431840', 'SAO JERONIMO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (348, '220240', 'CAPITAO DE CAMPOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1539, '316443', 'SAO SEBASTIAO DA VARGEM ALEGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2424, '311920', 'COROACI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (824, '350620', 'BENTO DE ABREU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5156, '432060', 'SEVERIANO DE ALMEIDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4350, '520530', 'CAVALCANTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3038, '260340', 'CALUMBI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (187, '353950', 'PITANGUEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (244, '240450', 'GUAMARE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3849, '351870', 'GUARUJA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1522, '315390', 'RAPOSOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3191, '410755', 'FAROL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4181, '171900', 'SANTA TEREZA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3477, '351490', 'ELIAS FAUSTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3609, '420860', 'JABORA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (82, '230170', 'AURORA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (819, '350400', 'ASSIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5029, '316000', 'SANTO ANTONIO DO AVENTUREIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1697, '420820', 'ITAJAI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2040, '230900', 'MUCAMBO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1552, '320316', 'LARANJA DA TERRA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2938, '230360', 'CATARINA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1650, '291650', 'ITAPICURU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3601, '420650', 'GUARAMIRIM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (995, '291760', 'JAGUAQUARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3463, '351190', 'CLEMENTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1556, '320480', 'SAO JOSE DO CALCADO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2340, '293290', 'VALENCA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5335, '521560', 'PADRE BERNARDO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4251, '312080', 'CRUZILIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3372, '314120', 'MATUTINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1388, '314350', 'MORADA NOVA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (118, '230740', 'JUCAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3974, '310530', 'BANDEIRA DO SUL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1387, '314340', 'MONTE SIAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5306, '521020', 'IPORA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2901, '251630', 'SUME', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (66, '221097', 'TANQUE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2315, '412350', 'SANTA HELENA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2794, '410395', 'CAMPINA DO SIMAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1285, '410405', 'CAMPO BONITO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2807, '410600', 'CONGONHINHAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (456, '411630', 'MUNHOZ DE MELO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4675, '240130', 'AUGUSTO SEVERO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4681, '240360', 'EXTREMOZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5421, '431275', 'NOVA ALVORADA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1812, '320435', 'RIO BANANAL', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (333, '220120', 'BARRAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1353, '412310', 'SANTA AMELIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5090, '110014', 'NOVA BRASILANDIA D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1214, '240300', 'CRUZETA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1266, '351910', 'IACANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (789, '313507', 'JAMPRUCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2720, '350900', 'CAIEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (383, '220570', 'LUIS CORREIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1418, '353780', 'PIEDADE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (274, '251690', 'UIRAUNA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5195, '522000', 'SAO JOAO D''ALIANCA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3325, '430187', 'BARRA DO QUARAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3591, '420400', 'CATANDUVAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4613, '270850', 'SAO LUIS DO QUITUNDE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5381, '430940', 'GUAPORE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4172, '211003', 'SANTA LUZIA DO PARUA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1555, '320455', 'SANTA MARIA DE JETIBA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4409, '430530', 'CHAPADA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2484, '220290', 'CORRENTE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (581, '280220', 'FEIRA NOVA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (801, '320503', 'VARGEM ALTA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4608, '251700', 'UMBUZEIRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1476, '355630', 'VALPARAISO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3493, '351760', 'GUAPIARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (652, '110026', 'RIO CRESPO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1201, '240040', 'AGUA NOVA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4861, '292410', 'PEDRAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3914, '351560', 'FERNANDO PRESTES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3165, '316700', 'SERRANOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5513, '354800', 'SANTO ANTONIO DE POSSE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (143, '353130', 'MONTE ALTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3583, '420215', 'BELMONTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5069, '316555', 'SETUBINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3570, '352590', 'JUNDIAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3963, '310290', 'ANTONIO CARLOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1829, '290180', 'ANTONIO GONCALVES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3155, '522068', 'SIMOLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3500, '351880', 'GUARULHOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2966, '292850', 'SANTA TERESINHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4071, '250200', 'BELEM DO BREJO DO CRUZ', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5584, '110013', 'MACHADINHO D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1817, '280350', 'LAGARTO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4039, '421635', 'SAO JOAO DO ITAPERIU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (105, '230530', 'IBIAPINA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4775, '431975', 'SAO VENDELINO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4586, '316556', 'SEM-PEIXE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4196, '210240', 'CAJAPIO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1261, '312240', 'DIVISA NOVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5566, '410442', 'CANDOI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1850, '291380', 'IPECAETA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (656, '110050', 'NOVO HORIZONTE DO OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4714, '430990', 'IBIRAIARAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (238, '231150', 'QUIXERE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5392, '431053', 'ITAARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (599, '280580', 'RIACHAO DO DANTAS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5382, '430950', 'GUARANI DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3448, '171700', 'PINDORAMA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1479, '355710', 'VOTUPORANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5342, '521730', 'PIRENOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1078, '251340', 'SANTA LUZIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1329, '411640', 'NOSSA SENHORA DAS GRACAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4620, '311390', 'CARMO DA CACHOEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1218, '240330', 'ENCANTO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3766, '353050', 'MOCOCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2133, '210955', 'RIBAMAR FIQUENE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2250, '411370', 'LONDRINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3440, '171150', 'JAU DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5250, '292500', 'PLANALTO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5039, '316130', 'SAO FRANCISCO DE SALES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4259, '312210', 'DIVINO DAS LARANJEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1743, '313340', 'ITAPAGIPE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1375, '314110', 'MATOZINHOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (416, '317180', 'VIRGINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4480, '311150', 'CAMPOS ALTOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2059, '313500', 'JAGUARACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (415, '317107', 'VEREDINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1722, '312950', 'IBIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5314, '521190', 'JATAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1957, '261050', 'PASSIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2560, '150690', 'SANTAREM NOVO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4988, '150220', 'CAPANEMA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1977, '510623', 'NOVA OLIMPIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5187, '521930', 'SANTA HELENA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1352, '412280', 'SALGADO FILHO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4020, '421400', 'PRESIDENTE GETULIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3283, '430544', 'CHUVISCA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3110, '350395', 'ASPASIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4650, '230480', 'GRANJEIRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4358, '520850', 'GOIANDIRA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3259, '421580', 'SAO BENTO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5064, '316480', 'SAO SEBASTIAO DO RIO PRETO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3752, '432080', 'SOLEDADE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2764, '355680', 'VIRADOURO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3057, '260690', 'IGUARACI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4700, '430700', 'ERECHIM', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2410, '311205', 'CANTAGALO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2618, '171250', 'MARIANOPOLIS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1954, '260920', 'MARAIAL', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3629, '421415', 'PRINCESA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1942, '260530', 'EXU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1680, '352580', 'JULIO MESQUITA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (365, '220390', 'FLORIANO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4254, '312120', 'DELFINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1147, '354380', 'RINOPOLIS', 30);
-- Fixture data: Brazilian municipalities ("municipios") loaded into base_municipio.
-- Columns:
--   id        - application primary key (not sequential in this dump; rows are unordered).
--   codigo    - 6-digit municipality code as a string (pattern matches IBGE municipal
--               codes without the check digit -- TODO confirm against the source dataset).
--   nome      - municipality name, uppercase ASCII (accents stripped); embedded
--               apostrophes are escaped by doubling per standard SQL (e.g. 'OLHO D''AGUA').
--   estado_id - foreign key into a state table (presumably base_estado; the referenced
--               table is defined elsewhere in this dump -- verify before reuse).
-- NOTE(review): this is one chunk of a larger dump; rows for other id ranges appear in
-- surrounding chunks. Do not edit values here -- they are reference data.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (619, '290205', 'ARACAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4168, '210940', 'PRIMEIRA CRUZ', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5027, '315950', 'SANTA RITA DO ITUETO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3995, '421050', 'MARAVILHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2682, '350180', 'AMERICO DE CAMPOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5101, '170380', 'BURITI DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (436, '354100', 'PRAIA GRANDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2567, '150775', 'SAPUCAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (550, '250370', 'CAJAZEIRAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4252, '312087', 'CURRAL DE DENTRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2717, '350790', 'BROTAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4046, '421730', 'SAUDADES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4676, '240150', 'BARCELONA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3735, '431930', 'SAO PAULO DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5186, '521925', 'SANTA FE DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4871, '292940', 'SAO MIGUEL DAS MATAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (700, '130370', 'SANTO ANTONIO DO ICA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5227, '510452', 'IPIRANGA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5357, '430675', 'DOUTOR RICARDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (881, '412710', 'TELEMACO BORBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4024, '421450', 'RIO DO CAMPO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3269, '430047', 'ALMIRANTE TAMANDARE DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4305, '352970', 'MIGUELOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (931, '420490', 'DESCANSO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3865, '314890', 'PEDRA DO INDAIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1229, '293245', 'UMBURANAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3335, '430258', 'BOZANO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2814, '410690', 'CURITIBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1465, '355250', 'SUZANO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (316, '211200', 'TASSO FRAGOSO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5519, '355060', 'SAO ROQUE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2669, '330590', 'TRAJANO DE MORAIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (911, '420243', 'BOCAINA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3125, '411360', 'LOBATO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2482, '220100', 'ARRAIAL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3741, '431973', 'SAO VALERIO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3776, '353450', 'OSCAR BRESSANE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2917, '290430', 'BREJOES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3675, '431340', 'NOVO HAMBURGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1270, '410010', 'ABATIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (937, '420555', 'FREI ROGERIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2269, '411660', 'NOVA AMERICA DA COLINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (60, '221040', 'SAO MIGUEL DO TAPUIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5269, '520350', 'BOM JESUS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2339, '292810', 'SANTA MARIA DA VITORIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (957, '291072', 'EUNAPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4895, '310460', 'ASTOLFO DUTRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4591, '316780', 'SOLEDADE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3366, '313900', 'MACHADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3371, '314080', 'MATIAS BARBOSA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5083, '316800', 'TAIOBEIRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2419, '311680', 'COLUNA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1373, '314070', 'MATEUS LEME', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5028, '315960', 'SANTA RITA DO SAPUCAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4490, '311410', 'CARMO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4077, '250415', 'CASSERENGUE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1965, '261350', 'SAO JOSE DO BELMONTE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1925, '251650', 'TAPEROA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3904, '291550', 'ITAJUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2489, '220800', 'PICOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (671, '120025', 'EPITACIOLANDIA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4264, '312310', 'DORES DE GUANHAES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4519, '432270', 'VERA CRUZ', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3254, '421440', 'RIO DAS ANTAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3278, '430420', 'CANDELARIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5144, '431600', 'ROLANTE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4722, '431112', 'JAQUIRANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4306, '353090', 'MOMBUCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (282, '260915', 'MANARI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3748, '432050', 'SERTAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4521, '432300', 'VIAMAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2495, '230495', 'GUAIUBA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1947, '260710', 'INGAZEIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1346, '412120', 'QUITANDINHA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5150, '431846', 'SAO JOSE DO HERVAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3508, '352050', 'INDAIATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3813, '350055', 'AGUAS DE SANTA BARBARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4388, '521971', 'SANTO ANTONIO DA BARRA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4317, '510621', 'NOVA CANAA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1958, '261100', 'PETROLANDIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1627, '315415', 'REDUTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1311, '411125', 'ITAPERUCU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (589, '280380', 'MALHADA DOS BOIS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1405, '314587', 'ORIZANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2471, '171610', 'PARAISO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1137, '354180', 'QUEIROZ', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (397, '220740', 'PALMEIRA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4052, '241100', 'RODOLFO FERNANDES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1323, '411480', 'MARIALVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2418, '311620', 'CHIADOR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2127, '172125', 'TUPIRAMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (440, '410105', 'ANAHY', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4818, '290195', 'APUAREMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3867, '314980', 'PERDIZES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2107, '150506', 'NOVO REPARTIMENTO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5446, '432310', 'VICENTE DUTRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (443, '410420', 'CAMPO LARGO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4537, '500124', 'ARAL MOREIRA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4660, '230890', 'MORRINHOS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5239, '355670', 'VINHEDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5583, '316090', 'SAO BRAS DO SUACUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1416, '314780', 'PASSA-VINTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (880, '412690', 'TAPIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1095, '251570', 'SERRA GRANDE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (727, '150230', 'CAPITAO POCO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5485, '521690', 'PILAR DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5372, '430825', 'FLORIANO PEIXOTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2072, '421930', 'VIDEIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2540, '150440', 'MARAPANIM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3832, '350890', 'CAIABU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (427, '350870', 'CACONDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4843, '291610', 'ITAPARICA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (850, '351670', 'GARCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1800, '320270', 'ITAGUACU', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1090, '251465', 'SAO JOSE DO BREJO DO CRUZ', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (109, '230550', 'IGUATU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2882, '230250', 'BREJO SANTO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3340, '430320', 'CACIQUE DOBLE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4068, '250110', 'AREIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2242, '411250', 'JARDIM ALEGRE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (69, '221135', 'VARZEA BRANCA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1903, '250933', 'MATINHAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5359, '430692', 'ENGENHO VELHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2631, '330030', 'BARRA DO PIRAI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3595, '420500', 'DIONISIO CERQUEIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3345, '522150', 'TURVANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1894, '250625', 'GADO BRAVO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2723, '350945', 'CAMPINA DO MONTE ALEGRE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3819, '350320', 'ARARAQUARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (742, '150380', 'JACUNDA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5294, '520740', 'EDEIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4347, '520465', 'CAMPINACU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5271, '520390', 'BURITI ALEGRE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1533, '315990', 'SANTO ANTONIO DO AMPARO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3807, '317100', 'VAZANTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1493, '313310', 'ITANHANDU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4584, '316470', 'SAO SEBASTIAO DO PARAISO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4906, '500060', 'AMAMBAI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4929, '500560', 'MIRANDA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4623, '313420', 'ITUIUTABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2011, '412840', 'URAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2789, '410330', 'BORRAZOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4458, '220770', 'PARNAIBA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1730, '313080', 'INGAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4666, '231170', 'RERIUTABA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1669, '351090', 'CASSIA DOS COQUEIROS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4721, '431100', 'JAGUARAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2195, '320360', 'MUCURICI', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (736, '150310', 'GURUPA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (423, '330385', 'PATY DO ALFERES', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2182, '317005', 'UBAPORANGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4085, '172065', 'SILVANOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3152, '520830', 'DIVINOPOLIS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4776, '432010', 'SARANDI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2117, '160050', 'OIAPOQUE', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1325, '411545', 'MARQUINHO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4642, '230150', 'ARNEIROZ', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4689, '240610', 'JUCURUTU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1070, '251270', 'REMIGIO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2610, '170825', 'FORTALEZA DO TABOCAO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4619, '310570', 'BARRA LONGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (806, '330400', 'PIRAI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (379, '220552', 'JULIO BORGES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1825, '290060', 'AIQUARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5389, '431040', 'INDEPENDENCIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4150, '210675', 'MIRANDA DO NORTE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1367, '412700', 'TEIXEIRA SOARES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3314, '430063', 'AMARAL FERRADOR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4912, '500200', 'BATAYPORA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1309, '411070', 'IRATI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3246, '421189', 'PAINEL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3219, '420127', 'ARABUTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4753, '431530', 'QUARAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4242, '311900', 'CORDISLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5140, '431507', 'PORTO VERA CRUZ', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1687, '420660', 'GUARUJA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2150, '230560', 'INDEPENDENCIA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (453, '411345', 'LINDOESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (869, '412555', 'SAO MANOEL DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3926, '110033', 'NOVA MAMORE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1632, '315540', 'RIO NOVO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4340, '520215', 'ARAGUAPAZ', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2943, '292440', 'PILAO ARCADO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3636, '421605', 'SAO CRISTOVAO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5172, '510715', 'RESERVA DO CABACAL', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3738, '431940', 'SAO PEDRO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4009, '421220', 'PAPANDUVA', 28);
-- Doubled single quote below is the standard SQL escape for an apostrophe in the name.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1058, '251040', 'OLHO D''AGUA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2885, '230655', 'ITAREMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4206, '210542', 'ITINGA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4282, '312630', 'FORTALEZA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3816, '350200', 'ANALANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1541, '316620', 'SENHORA DOS REMEDIOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1827, '290140', 'ANGICAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1951, '260820', 'JOAQUIM NABUCO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2918, '290510', 'CAEM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1430, '354165', 'QUADRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4171, '211000', 'SANTA LUZIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2038, '230185', 'BANABUIU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2847, '230370', 'CAUCAIA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1569, '210596', 'LAGOA GRANDE DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2683, '350190', 'AMPARO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4699, '430690', 'ENCRUZILHADA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3590, '420390', 'CAPINZAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4552, '500797', 'TAQUARUSSU', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5277, '520455', 'CALDAZINHA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1998, '171380', 'PALMEIRAS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1487, '312880', 'GUIDOVAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1824, '280670', 'SAO CRISTOVAO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3901, '290405', 'BONITO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3697, '431514', 'PRESIDENTE LUCENA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1981, '120005', 'ASSIS BRASIL', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (382, '220557', 'LAGOA DE SAO FRANCISCO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2147, '230380', 'CEDRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (975, '291370', 'INHAMBUPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2404, '310970', 'CACHOEIRA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (679, '130008', 'ANAMA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1754, '313550', 'JEQUERI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (411, '313360', 'ITAPEVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4908, '500080', 'ANAURILANDIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4559, '510269', 'CANABRAVA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5004, '313820', 'LAVRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2399, '310790', 'BOM REPOUSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1380, '314220', 'MIRAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1642, '315700', 'SALINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4611, '261570', 'TRIUNFO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3050, '260540', 'FEIRA NOVA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4636, '221130', 'VALENCA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5040, '316150', 'SAO GERALDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3684, '431406', 'PASSA SETE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4094, '210047', 'ALTO ALEGRE DO PINDARE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1322, '411450', 'MANOEL RIBAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2224, '410970', 'IBAITI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2801, '410460', 'CAPITAO LEONIDAS MARQUES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2548, '150549', 'PALESTINA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1295, '410730', 'DOUTOR CAMARGO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (683, '130063', 'BERURI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5145, '431640', 'ROSARIO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (234, '230660', 'ITATIRA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2849, '231220', 'SANTA QUITERIA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4170, '210970', 'SAMBAIBA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4596, '211180', 'SITIO NOVO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2893, '241440', 'TOUROS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4641, '230125', 'ARARENDA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2651, '330320', 'NILOPOLIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5417, '431250', 'MOSTARDAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5465, '510517', 'JURUENA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2868, '160030', 'MACAPA', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3932, '110149', 'SAO FRANCISCO DO GUAPORE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (44, '220887', 'RIBEIRA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3265, '421825', 'TIMBO GRANDE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4121, '210350', 'COLINAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2713, '350745', 'BOREBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5153, '431990', 'SAPIRANGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (948, '290900', 'CORDEIROS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3912, '313375', 'ITAU DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4560, '510310', 'COCALINHO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1600, '314970', 'PERDIGAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3400, '150175', 'BREJO GRANDE DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2609, '170770', 'FILADELFIA', 31);
-- Doubled single quote below is the standard SQL escape for an apostrophe in the name.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4459, '220779', 'PAU D''ARCO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3575, '352850', 'MAIRIPORA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5110, '430607', 'CRISTAL DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3783, '353750', 'PEREIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3490, '351690', 'GENERAL SALGADO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5031, '316030', 'SANTO ANTONIO DO JACINTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1766, '316980', 'TURVOLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (116, '230710', 'JARDIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (150, '353250', 'NEVES PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2709, '350670', 'BOA ESPERANCA DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4975, '510590', 'NOBRES', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2099, '140028', 'IRACEMA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (263, '250830', 'LAGOA SECA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2744, '355300', 'TAGUAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (430, '351950', 'IBIRAREMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4865, '292640', 'RIACHO DE SANTANA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3488, '351640', 'FRANCO DA ROCHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3967, '310390', 'ARAUJOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4853, '292090', 'MASCOTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4339, '520150', 'APORE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2003, '172010', 'SAO BENTO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1950, '260800', 'JATAUBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3817, '350250', 'APARECIDA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1665, '350330', 'ARARAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4571, '315980', 'SANTA VITORIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4308, '353190', 'MORRO AGUDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3243, '421110', 'MONTE CASTELO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3526, '352400', 'ITUPEVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1989, '140045', 'PACARAIMA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3431, '170384', 'CAMPOS LINDOS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (939, '420590', 'GASPAR', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1551, '320300', 'IUNA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4857, '292270', 'NOVA CANAA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3648, '421915', 'VARGEM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3789, '354060', 'PORTO FELIZ', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4297, '352530', 'JAU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2981, '293077', 'SOBRADINHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (233, '230580', 'IPU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (611, '290050', 'ERICO CARDOSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1022, '292180', 'MORTUGABA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4838, '291270', 'IBIRAPITANGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (319, '211250', 'TUTOIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4997, '313690', 'JURUAIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1506, '314190', 'MINDURI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5468, '510677', 'PORTO ALEGRE DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4960, '510335', 'CONFRESA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4943, '510030', 'ALTO ARAGUAIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5057, '316380', 'SAO MIGUEL DO ANTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2166, '312960', 'IBIAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1588, '260640', 'GRAVATA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (14, '510515', 'JUINA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1083, '251390', 'SAO BENTO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2324, '251550', 'SERRA BRANCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3076, '261000', 'PALMARES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2818, '261640', 'VITORIA DE SANTO ANTAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2281, '411845', 'PATO BRAGADO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2555, '150618', 'RONDON DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (645, '110005', 'CEREJEIRAS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4887, '310170', 'ALMENARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1699, '420840', 'ITAPIRANGA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3670, '430370', 'CAMPINA DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3348, '522220', 'VILA BOA', 13);
-- Doubled single quote below is the standard SQL escape for an apostrophe in the name.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3343, '522070', 'SITIO D''ABADIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5393, '431057', 'ITAPUCA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4604, '241400', 'TANGARA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (487, '240750', 'MAXARANGUAPE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5376, '430870', 'GAURAMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2227, '411007', 'IMBAU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (232, '230435', 'FORQUILHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2015, '431555', 'RIO DOS INDIOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (538, '250157', 'BARRA DE SANTANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (666, '110150', 'SERINGUEIRAS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3322, '430150', 'AUGUSTO PESTANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4756, '431575', 'RIOZINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2014, '431190', 'MARCELINO RAMOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3731, '431870', 'SAO LEOPOLDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5080, '316720', 'SETE LAGOAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4199, '210325', 'CIDELANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3673, '431333', 'NOVA RAMADA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2839, '521290', 'MARZAGAO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2962, '292760', 'SANTA BRIGIDA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (441, '410230', 'BALSA NOVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1160, '354620', 'SANTA CRUZ DA CONCEICAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1462, '355140', 'SERRA AZUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4508, '311690', 'COMENDADOR GOMES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (189, '353990', 'POLONI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3060, '260740', 'ITACURUBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3764, '352960', 'MERIDIANO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (950, '290940', 'COTEGIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1097, '251593', 'SERTAOZINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3903, '291110', 'FORMOSA DO RIO PRETO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2785, '410290', 'BITURUNA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (919, '420340', 'CAMPO BELO DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4321, '510665', 'PONTAL DO ARAGUAIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5526, '355360', 'TAPIRATIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4333, '510885', 'NOVA MARILANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2343, '311370', 'CARLOS CHAGAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1297, '410765', 'FAZENDA RIO GRANDE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1293, '410680', 'CRUZ MACHADO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5131, '431245', 'MORRO REDONDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (636, '290475', 'BURITIRAMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3540, '352640', 'LARANJAL PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3785, '353860', 'PIRACAIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2779, '410170', 'ARARUNA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4795, '250435', 'CATURITE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2110, '150570', 'PONTA DE PEDRAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3879, '315420', 'RESENDE COSTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (754, '290790', 'CIPO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3961, '310270', 'CACHOEIRA DE PAJEU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2641, '330160', 'DUAS BARRAS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (317, '211230', 'TUNTUM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1429, '354160', 'PROMISSAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2572, '150805', 'TRAIRAO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (908, '420210', 'BARRA VELHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3268, '430005', 'AGUA SANTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (475, '240530', 'JANUARIO CICCO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3029, '260130', 'BARRA DE GUABIRABA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (630, '290380', 'BOA VISTA DO TUPIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2611, '170930', 'GUARAI', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1807, '320380', 'MUQUI', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5473, '520170', 'ARAGARCAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (72, '230020', 'ACARAU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5193, '521975', 'SANTO ANTONIO DO DESCOBERTO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2964, '292800', 'SANTALUZ', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2494, '230423', 'CROATA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1376, '314140', 'MEDINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1629, '315450', 'RIACHO DOS MACHADOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2536, '312160', 'DIAMANTINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5345, '521800', 'PORANGATU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5244, '316900', 'TOCANTINS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5023, '315880', 'SANTANA DO JACARE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5311, '521150', 'ITUMBIARA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2558, '150655', 'SANTA LUZIA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (289, '292200', 'MUCURI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4924, '500450', 'ITAPORA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4907, '500070', 'ANASTACIO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5177, '510760', 'RONDONOPOLIS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4947, '510125', 'ARAPUTANGA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5237, '351620', 'FRANCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2832, '270240', 'DELMIRO GOUVEIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2114, '150670', 'SANTANA DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1152, '354460', 'SABINO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5247, '312090', 'CURVELO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5494, '520110', 'ANAPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1797, '320250', 'IBIRACU', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1334, '411760', 'PALMAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5126, '431110', 'JAGUARI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3443, '171270', 'MATEIROS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2539, '150400', 'LIMOEIRO DO AJURU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4531, '432320', 'VICTOR GRAEFF', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (803, '330185', 'GUAPIMIRIM', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5038, '316120', 'SAO FRANCISCO DE PAULA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3936, '120030', 'FEIJO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2643, '330200', 'ITAGUAI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4369, '521295', 'MATRINCHA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5077, '316670', 'SERRA DOS AIMORES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (591, '280420', 'MONTE ALEGRE DE SERGIPE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5025, '315920', 'SANTA RITA DE CALDAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1186, '231210', 'SANTANA DO CARIRI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2802, '410480', 'CASCAVEL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2678, '350115', 'ALUMINIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (78, '230110', 'ARACATI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1163, '354660', 'SANTA FE DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (339, '220194', 'BOQUEIRAO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4250, '312070', 'CRUZEIRO DA FORTALEZA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2787, '410315', 'BOM JESUS DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3560, '352160', 'IRAPURU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (643, '354940', 'SAO JOAQUIM DA BARRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4467, '310920', 'BUENOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3913, '351370', 'DESCALVADO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4017, '421335', 'PONTE ALTA DO NORTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2137, '220265', 'CAXINGO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3107, '320440', 'RIO NOVO DO SUL', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4449, '220556', 'LAGOA DO BARRO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4512, '432215', 'TUNAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (259, '250450', 'CONDADO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1779, '317170', 'VIRGINIA', 15);
-- Fixed row: codigo '241020' (IBGE prefix '24') is a Rio Grande do Norte municipality
-- (Portalegre/RN); every other '24xxxx' row in this dump uses estado_id 1, but this row
-- said 2 (the id used by '43xxxx'/RS codes). Name also normalized to the uppercase
-- convention used by all other rows.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2, '241020', 'PORTALEGRE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2671, '330615', 'VARRE-SAI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5567, '120034', 'MANOEL URBANO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3305, '421917', 'VARGEM BONITA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1773, '317075', 'VARJAO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2367, '251110', 'PEDRA LAVRADA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2620, '171320', 'MIRACEMA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5489, '431490', 'PORTO ALEGRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2321, '313980', 'MAR DE ESPANHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (179, '353790', 'PILAR DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (983, '291530', 'ITAGIMIRIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3419, '150820', 'VIGIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3555, '352965', 'MESOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3128, '412470', 'SAO JERONIMO DA SERRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3040, '260350', 'CAMOCIM DE SAO FELIX', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2305, '412170', 'RESERVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (22, '510860', 'VILA RICA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (215, '220140', 'BARRO DURO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5190, '521960', 'SANTA TEREZA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (155, '353325', 'NOVAIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (941, '420630', 'GUABIRUBA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1034, '292340', 'PALMAS DE MONTE ALTO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1210, '240220', 'CANGUARETAMA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (322, '211300', 'VITORINO FREIRE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4759, '431642', 'SAGRADA FAMILIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1486, '312760', 'GOUVEIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (901, '420125', 'APIUNA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4530, '432280', 'VERANOPOLIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (222, '220585', 'MADEIRO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4270, '312390', 'ENTRE RIOS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (862, '412420', 'SANTO ANTONIO DO CAIUA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4500, '311545', 'CATUJI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1521, '315320', 'PRESIDENTE JUSCELINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2844, '313650', 'JORDANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2460, '270790', 'SANTA LUZIA DO NORTE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5139, '431505', 'PORTO MAUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (872, '412590', 'SAO PEDRO DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (969, '291280', 'IBIRAPUA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2988, '293150', 'TEOFILANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (953, '291005', 'DIAS D''AVILA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1516, '314910', 'PEDRALVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5075, '316650', 'SERRA AZUL DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5019, '315820', 'SANTA MARIA DO SUACUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (229, '230080', 'ANTONINA DO NORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4539, '500240', 'CAARAPO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2018, '500348', 'DOIS IRMAOS DO BURITI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1048, '250890', 'MAMANGUAPE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5275, '520430', 'CACU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (728, '150240', 'CASTANHAL', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1313, '411170', 'JABOTI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2218, '410865', 'GOIOXIM', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1331, '411700', 'NOVA FATIMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (355, '220285', 'CORONEL JOSE DIAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2230, '411050', 'IPIRANGA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (935, '420543', 'FORMOSA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4097, '210083', 'APICUM-ACU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4191, '210125', 'BACABEIRA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1692, '420760', 'IPIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4360, '520940', 'GUARANI DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2976, '292980', 'SAUDE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3386, '314580', 'ONCA DE PITANGUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (42, '220870', 'REDENCAO DO GURGUEIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5362, '430705', 'ERNESTINA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5182, '510790', 'SINOP', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3706, '431610', 'RONDA ALTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (197, '210020', 'ALCANTARA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3891, '316250', 'SAO JOAO DEL REI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3157, '312590', 'FERROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1969, '261485', 'TAMANDARE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4892, '310370', 'ARAPONGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5050, '316280', 'SAO JOAO EVANGELISTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (312, '211172', 'SATUBINHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2996, '293317', 'VARZEDO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3868, '315010', 'PIAU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3666, '430245', 'BOQUEIRAO DO LEAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3677, '431350', 'OSORIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (896, '420050', 'AGUAS DE CHAPECO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5554, '510025', 'ALTA FLORESTA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3610, '420915', 'JOSE BOITEUX', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1528, '315737', 'SANTA CRUZ DE SALINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3942, '130030', 'AUTAZES', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3418, '150797', 'TERRA SANTA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (858, '352010', 'IGARAPAVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2746, '355370', 'TAQUARITINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (186, '353930', 'PIRASSUNUNGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (607, '280750', 'TOMAR DO GERU', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3774, '353360', 'NUPORANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3496, '351820', 'GUARARAPES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4854, '292150', 'MONTE SANTO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4439, '220350', 'ELESBAO VELOSO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (921, '420380', 'CANOINHAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2158, '231350', 'TRAIRI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (847, '351540', 'FARTURA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4400, '430465', 'CAPAO DO CIPO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2464, '130330', 'NOVO ARIPUANA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1714, '312810', 'GUAPE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4587, '316580', 'SENADOR JOSE BENTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (445, '410590', 'COLORADO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1093, '251510', 'SAO SEBASTIAO DE LAGOA DE ROCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (661, '110130', 'MIRANTE DA SERRA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (97, '230427', 'ERERE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4309, '353220', 'NARANDIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (654, '110034', 'ALVORADA D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3747, '432045', 'SERIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3815, '350150', 'ALVINLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4982, '510626', 'NOVO MUNDO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3043, '260415', 'CASINHAS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4648, '230400', 'COREAU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1265, '350910', 'CAIUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1659, '330412', 'QUATIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4893, '310380', 'ARAPUA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1594, '314840', 'PAULISTAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1497, '313590', 'JESUANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2637, '330115', 'CARDOSO MOREIRA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2084, '431912', 'SAO MARTINHO DA SERRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (895, '420030', 'AGRONOMICA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3377, '314315', 'MONTE FORMOSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1112, '270540', 'MONTEIROPOLIS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2676, '350075', 'ALAMBARI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (73, '230030', 'ACOPIARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5323, '521370', 'MONTES CLAROS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5481, '521090', 'ITAPACI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5210, '522180', 'URUTAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (753, '290780', 'CICERO DANTAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2157, '231200', 'SANTANA DO ACARAU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5472, '520030', 'ALEXANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1641, '315690', 'SACRAMENTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3873, '315190', 'POCRANE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1184, '231180', 'RUSSAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4316, '510617', 'NOVA NAZARE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5234, '290070', 'ALAGOINHAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4953, '510267', 'CAMPO VERDE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4915, '500295', 'CHAPADAO DO SUL', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1835, '290490', 'CACHOEIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (257, '250310', 'CABACEIRAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1344, '412050', 'PRIMEIRO DE MAIO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2262, '411550', 'MARUMBI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3958, '310210', 'ALTO RIO DOCE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3216, '412760', 'TIJUCAS DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4792, '510895', 'NOVA MONTE VERDE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (817, '350335', 'ARCO-IRIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (462, '412180', 'RIBEIRAO CLARO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (74, '230040', 'AIUABA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1983, '130050', 'BARREIRINHA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5476, '520505', 'CASTELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4965, '510380', 'FIGUEIROPOLIS D''OESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4355, '520735', 'EDEALINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1362, '412570', 'SAO MIGUEL DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3442, '171240', 'LIZARDA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2808, '410640', 'CORNELIO PROCOPIO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2143, '221080', 'SIMPLICIO MENDES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5436, '430580', 'CONSTANTINA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1205, '240140', 'BAIA FORMOSA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (610, '290035', 'ADUSTINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2285, '411885', 'PEROBAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3052, '260570', 'FLORESTA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2115, '150710', 'SAO CAETANO DE ODIVELAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4640, '230090', 'APUIARES', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3103, '261610', 'VERDEJANTE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2006, '410470', 'CARLOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (710, '140030', 'MUCAJAI', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3295, '421790', 'TANGARA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4763, '431740', 'SANTIAGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4868, '292805', 'SANTA LUZIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1804, '320334', 'MARECHAL FLORIANO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3565, '352370', 'ITIRAPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5405, '431173', 'MAMPITUBA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1242, '311230', 'CAPELINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4257, '312170', 'DIOGO DE VASCONCELOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5397, '431090', 'JACUTINGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4736, '431300', 'NOVA BRESCIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (399, '220760', 'PARNAGUA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5205, '522130', 'TRES RANCHOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4749, '431470', 'PLANALTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (584, '280270', 'ILHA DAS FLORES', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2205, '330180', 'ENGENHEIRO PAULO DE FRONTIN', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3830, '350800', 'BURI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (221, '220558', 'LAGOA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2686, '350240', 'ANHUMAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4758, '431620', 'RONDINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (677, '120045', 'SENADOR GUIOMARD', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4424, '220130', 'BARREIRAS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1967, '261430', 'MOREILANDIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (917, '420320', 'CAMBORIU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2225, '410980', 'IBIPORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (595, '280480', 'NOSSA SENHORA DO SOCORRO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2027, '315670', 'SABARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2775, '410115', 'ANGULO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (354, '220277', 'COLONIA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4482, '311190', 'CANA VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4269, '312385', 'ENTRE FOLHAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4433, '220260', 'CASTELO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2426, '312000', 'CORREGO NOVO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2013, '430930', 'GUAIBA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4281, '312620', 'FORMOSO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1855, '291780', 'JAGUARIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5117, '430830', 'FONTOURA XAVIER', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4971, '510525', 'LUCAS DO RIO VERDE', 17);
-- Seed data for base_municipio: Brazilian municipalities.
-- Columns: id (PK), codigo (7-digit IBGE municipality code as text),
-- nome (municipality name, stored unaccented and UPPERCASE), estado_id (FK to the state table).
-- NOTE(review): normalized 'Recife' to 'RECIFE' — it was the only mixed-case name in this
-- chunk; every other row (including other state capitals) follows the uppercase convention,
-- so case-sensitive matches on nome would have missed it.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5141, '431535', 'QUINZE DE NOVEMBRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (936, '420545', 'FORQUILHINHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2809, '410645', 'CORONEL DOMINGOS SOARES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4008, '421210', 'PALMITOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3824, '350535', 'BARRA DO CHAPEU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5301, '520929', 'GUARAITA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4920, '500375', 'ELDORADO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4413, '430583', 'COQUEIRO BAIXO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5154, '432030', 'SELBACH', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1921, '251480', 'SAO JOSE DOS CORDEIROS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3514, '352140', 'IRACEMAPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5550, '431680', 'SANTA CRUZ DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (934, '420520', 'ERVAL VELHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (162, '353475', 'OUROESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4158, '210805', 'PAULINO NEVES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1036, '292360', 'PARAMIRIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5577, '314740', 'PARAOPEBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3989, '310830', 'BORDA DA MATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4476, '311110', 'CAMPINA VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5052, '316294', 'SAO JOSE DA BARRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4163, '210860', 'PINHEIRO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4948, '510130', 'ARENAPOLIS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4919, '500330', 'COXIM', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1287, '410465', 'CARAMBEI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (381, '220555', 'LAGOA ALEGRE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3196, '411180', 'JACAREZINHO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3919, '352430', 'JABOTICABAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3456, '351040', 'CAPIVARI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5341, '521720', 'PIRANHAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5298, '520870', 'GOIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2240, '411210', 'JANDAIA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5344, '521760', 'PLANALTINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5209, '522157', 'UIRAPURU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3024, '260030', 'AGRESTINA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1472, '355495', 'TUIUTI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1146, '354350', 'RIVERSUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4133, '210490', 'GUIMARAES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1123, '270750', 'PORTO REAL DO COLEGIO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (512, '241280', 'SAO RAFAEL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2026, '315030', 'PIEDADE DO RIO GRANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1234, '310770', 'BOM JESUS DO AMPARO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4633, '221060', 'SAO RAIMUNDO NONATO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (419, '320470', 'SAO GABRIEL DA PALHA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4356, '520750', 'ESTRELA DO NORTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3584, '420230', 'BIGUACU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2215, '330560', 'SILVA JARDIM', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2450, '270375', 'JEQUIA DA PRAIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2261, '411540', 'MARMELEIRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2799, '410440', 'CANDIDO DE ABREU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1767, '317000', 'UBAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4794, '521878', 'RIO QUENTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2627, '320530', 'VITORIA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (191, '354030', 'PONTES GESTAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (605, '280710', 'SIMAO DIAS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1500, '313862', 'LIMEIRA DO OESTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4025, '421490', 'RIO FORTUNA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4144, '210620', 'LUIS DOMINGUES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3525, '352360', 'ITIRAPINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3909, '311400', 'CARMO DA MATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1624, '315370', 'QUARTEL GERAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2721, '350920', 'CAJAMAR', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4771, '431910', 'SAO MARTINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (457, '411705', 'NOVA LARANJEIRAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3767, '353110', 'MONGAGUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3597, '420535', 'FLOR DO SERTAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4235, '311787', 'CONFINS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4131, '210462', 'GOVERNADOR LUIZ ROCHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3719, '431760', 'SANTO ANTONIO DA PATRULHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5439, '430610', 'CRUZ ALTA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (135, '231040', 'PARAMOTI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1663, '350100', 'ALTINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4209, '210592', 'LAGOA DO MATO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2685, '350230', 'ANHEMBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3631, '421507', 'RIQUEZA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1936, '260320', 'CAETES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2645, '330210', 'ITAOCARA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (280, '260720', 'IPOJUCA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2975, '292970', 'SATIRO DIAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (173, '353670', 'PEDERNEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (606, '280720', 'SIRIRI', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (470, '412796', 'TURVO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4465, '310860', 'BRASILIA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1781, '317220', 'WENCESLAU BRAZ', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (347, '220230', 'CANTO DO BURITI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (27, '520495', 'CAMPOS VERDES', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (267, '251150', 'PILAR', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5062, '316450', 'SAO SEBASTIAO DO MARANHAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2035, '220540', 'JOAQUIM PIRES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3049, '260520', 'ESCADA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (972, '291310', 'IBITITA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5455, '500515', 'JUTI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2327, '261240', 'SANHARO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5360, '430693', 'ENTRE-IJUIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (269, '251275', 'RIACHAO DO BACAMARTE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (522, '241430', 'TIMBAUBA DOS BATISTAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (77, '230100', 'AQUIRAZ', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (751, '290710', 'CARINHANHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (955, '291050', 'ENTRE RIOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2408, '311130', 'CAMPO DO MEIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (108, '230540', 'ICO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1490, '313090', 'INHAPIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2435, '312360', 'ELOI MENDES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1618, '315270', 'PRADOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5001, '313750', 'LAGOA FORMOSA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2178, '313600', 'JOAIMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (570, '270915', 'TEOTONIO VILELA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4342, '520330', 'BELA VISTA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4554, '510040', 'ALTO GARCAS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4535, '500020', 'AGUA CLARA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (287, '261600', 'VENTUROSA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1284, '410390', 'CAMPINA DA LAGOA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3195, '411140', 'IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3856, '412863', 'DOUTOR ULYSSES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3094, '261390', 'SERRA TALHADA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4428, '220198', 'BREJO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3142, '431517', 'PROTASIO ALVES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4579, '316265', 'SAO JOAO DO PACUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3190, '410720', 'DOIS VIZINHOS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (889, '412850', 'WENCESLAU BRAZ', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (723, '150170', 'BRAGANCA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4401, '430470', 'CARAZINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3667, '430270', 'BUTIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5420, '431270', 'NONOAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3506, '352030', 'IGUAPE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1165, '354700', 'SANTA MARIA DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1128, '270810', 'SANTANA DO MUNDAU', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1619, '315290', 'PRATAPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4841, '291500', 'ITAETE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (482, '240670', 'LAJES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4453, '220660', 'MONTE ALEGRE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4226, '211090', 'SAO FRANCISCO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1996, '170820', 'FORMOSO DO ARAGUAIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1975, '430160', 'BAGE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2616, '171190', 'LAGOA DA CONFUSAO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4004, '421170', 'ORLEANS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2338, '292390', 'PAU BRASIL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4562, '510336', 'CONQUISTA D''OESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5033, '316050', 'SANTO ANTONIO DO RIO ABAIXO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5291, '520670', 'DAMIANOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5022, '315870', 'SANTANA DO GARAMBEU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3260, '421670', 'SAO JOSE DO CEDRO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5199, '522026', 'SAO MIGUEL DO PASSA QUATRO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (207, '210670', 'MIRADOR', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2000, '171670', 'COLMEIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (40, '220830', 'PIRACURUCA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2679, '350130', 'ALVARES MACHADO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (925, '420420', 'CHAPECO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4065, '250040', 'ALAGOA NOVA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4287, '351980', 'ICEM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4897, '310550', 'BARAO DE MONTE ALTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3532, '352500', 'JANDIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (300, '211080', 'SAO FELIX DE BALSAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4115, '210270', 'CANTANHEDE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5060, '316420', 'SAO ROMAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3385, '314545', 'OLHOS-D''AGUA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1167, '354750', 'SANTA RITA DO PASSA QUATRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (140, '353080', 'MOJI MIRIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4247, '312010', 'COUTO DE MAGALHAES DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (288, '290020', 'ABARE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4146, '210637', 'MARANHAOZINHO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4764, '431755', 'SANTO ANTONIO DO PALMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4053, '241140', 'SANTANA DO MATOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1867, '292303', 'NOVO HORIZONTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (125, '230840', 'MISSAO VELHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1177, '354925', 'SAO JOAO DE IRACEMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1240, '311160', 'CAMPOS GERAIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4590, '316750', 'SIMAO PEREIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5587, '130006', 'AMATURA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (664, '110147', 'PRIMAVERA DE RONDONIA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (286, '261510', 'TEREZINHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4502, '311560', 'CEDRO DO ABAETE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (714, '150013', 'ABEL FIGUEIREDO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2473, '210040', 'ALTAMIRA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1507, '314280', 'MONTE ALEGRE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4769, '431849', 'SAO JOSE DO INHACORA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2614, '171110', 'ITAPORA DO TOCANTINS', 31);
-- Normalized from 'Recife' to match the uppercase naming convention of all other rows.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3, '261160', 'RECIFE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1834, '290440', 'BREJOLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3823, '350490', 'BANANAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (210, '211010', 'SANTA QUITERIA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3384, '314537', 'NOVORIZONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5256, '520020', 'AGUA LIMPA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2840, '521350', 'MONTE ALEGRE DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3800, '292110', 'MEDEIROS NETO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5235, '292950', 'SAO SEBASTIAO DO PASSE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4851, '291995', 'MAETINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3941, '130014', 'APUI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1185, '231190', 'SABOEIRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1232, '310430', 'AREADO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1484, '312670', 'FRANCISCO SA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4190, '210090', 'ARAIOSES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (785, '313170', 'ITABIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1725, '313030', 'IGUATAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5283, '520540', 'CERES', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1739, '313270', 'ITAMBACURI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (68, '221120', 'URUCUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3871, '315110', 'PIRAPETINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2173, '313330', 'ITAOBIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2068, '410930', 'GUARANIACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2774, '410110', 'ANDIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3056, '260660', 'IBIMIRIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5394, '431070', 'ITATIBA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4436, '220300', 'CRISTALANDIA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1271, '410040', 'ALMIRANTE TAMANDARE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1306, '410990', 'ICARAIMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (732, '150285', 'CURUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (748, '290670', 'CANDIDO SALES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1548, '320040', 'ANCHIETA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4728, '431198', 'MARIANA PIMENTEL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1407, '314630', 'PADRE PARAISO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2278, '411770', 'PALMEIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2172, '313250', 'ITAMARANDIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (539, '250170', 'BARRA DE SAO MIGUEL', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1351, '412250', 'RONCADOR', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1209, '240200', 'CAICO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4099, '210095', 'ARAME', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5564, '310930', 'BURITIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1450, '354570', 'SANTA ALBERTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1332, '411721', 'NOVA SANTA BARBARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1879, '171575', 'PALMEIROPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4654, '230630', 'ITAPAGE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4494, '311460', 'CARRANCAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1520, '315217', 'PONTO DOS VOLANTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (706, '130440', 'URUCURITUBA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1020, '292140', 'MIRANGABA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2249, '411340', 'LEOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3734, '431915', 'SAO MIGUEL DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4151, '210700', 'MONTES ALTOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3303, '421900', 'URUSSANGA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1560, '120060', 'TARAUACA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (946, '290870', 'CONDEUBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5151, '431848', 'SAO JOSE DO HORTENCIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5470, '510780', 'SANTO ANTONIO DO LEVERGER', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2233, '411100', 'ITAMBARACA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3293, '421755', 'SERRA ALTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2804, '410540', 'CHOPINZINHO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4079, '171889', 'SANTA RITA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3338, '430300', 'CACHOEIRA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5424, '431301', 'NOVA CANDELARIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3649, '421935', 'VITOR MEIRELES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (640, '290550', 'CALDEIRAO GRANDE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4621, '312300', 'DORES DE CAMPOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2796, '410410', 'CAMPO DO TENENTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5403, '431160', 'LIBERATO SALZANO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3346, '522170', 'URUANA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (314, '211178', 'SERRANO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5416, '431247', 'MORRO REUTER', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2765, '355695', 'VITORIA BRASIL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (701, '130390', 'SAO PAULO DE OLIVENCA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (815, '350270', 'APIAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (304, '211107', 'SAO JOAO DO SOTER', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4471, '310980', 'CACHOEIRA DOURADA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4635, '221095', 'TAMBORIL DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1684, '353430', 'ORLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3256, '421545', 'SANGAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5175, '510735', 'SAO JOSE DO XINGU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4991, '150760', 'SAO MIGUEL DO GUAMA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3209, '412175', 'RESERVA DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (822, '350540', 'BARRA DO TURVO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2025, '314250', 'MONJOLOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3997, '421080', 'MELEIRO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1784, '320016', 'AGUA DOCE DO NORTE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (124, '230835', 'MILHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2734, '355100', 'SAO VICENTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1124, '270760', 'QUEBRANGULO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3826, '350630', 'BERNARDINO DE CAMPOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3873, '520340', 'BOM JARDIM DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4227, '211125', 'SAO JOSE DOS BASILIOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1039, '292105', 'MATINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4543, '500410', 'GUIA LOPES DA LAGUNA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2531, '291600', 'ITANHEM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4073, '250280', 'BREJO DO CRUZ', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (9, '510080', 'APIACAS', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3894, '316690', 'SERRANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4869, '292880', 'SANTO ESTEVAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1585, '290750', 'CATU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4823, '290420', 'BOTUPORA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1583, '260200', 'BODOCO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1794, '320200', 'DORES DO RIO PRETO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2628, '330010', 'ANGRA DOS REIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (832, '350950', 'CAMPINAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (463, '412260', 'RONDON', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3586, '420280', 'BRACO DO NORTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (647, '110007', 'CORUMBIARA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2005, '410060', 'ALTO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1645, '315730', 'SANTA BARBARA DO TUGURIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4553, '510020', 'AGUA BOA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3201, '411610', 'MOREIRA SALES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4425, '220160', 'BENEDITINOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (93, '230365', 'CATUNDA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1357, '412402', 'SANTA TEREZA DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3504, '351925', 'IARAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3192, '410832', 'FRANCISCO ALVES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4088, '172097', 'TALISMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1339, '411925', 'PINHAL DE SAO BENTO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1320, '411400', 'MAMBORE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4877, '293200', 'UAUA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3309, '430010', 'AGUDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3225, '420480', 'CURITIBANOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5121, '431010', 'IGREJINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (237, '231070', 'PENTECOSTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4656, '230720', 'JATI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1215, '240310', 'CURRAIS NOVOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3244, '421120', 'MORRO DA FUMACA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4525, '432210', 'TUCUNDUVA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4711, '430915', 'GRAMADO XAVIER', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3571, '352650', 'LAVINIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4744, '431407', 'PASSO DO SOBRADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3549, '352860', 'MANDURI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3644, '421830', 'TRES BARRAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3530, '352470', 'JAGUARIUNA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1997, '171195', 'LAGOA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4691, '240660', 'LAGOA SALGADA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1379, '314210', 'MIRADOURO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4511, '432200', 'TRIUNFO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (940, '420620', 'GRAVATAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3426, '170190', 'ARAGUACEMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5441, '430637', 'DILERMANDO DE AGUIAR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3445, '171420', 'NATIVIDADE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2666, '330540', 'SAPUCAIA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2571, '150800', 'TOME-ACU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2725, '354950', 'SAO JOSE DA BELA VISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4106, '210170', 'BARREIRINHAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3302, '421895', 'URUPEMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4189, '210070', 'ANAJATUBA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4325, '510729', 'SAO JOSE DO POVO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3361, '313730', 'LAGOA DOS PATOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (127, '230910', 'MULUNGU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4232, '311730', 'CONCEICAO DAS ALAGOAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (211, '211102', 'SAO JOAO DO CARU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2776, '410130', 'ANTONIO OLINTO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2738, '355180', 'SETE BARRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1219, '240390', 'FRANCISCO DANTAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4577, '316220', 'SAO JOAO BATISTA DO GLORIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5091, '110090', 'CASTANHEIRAS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4724, '431140', 'LAJEADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1187, '231250', 'SAO JOAO DO JAGUARIBE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (163, '353480', 'OURO VERDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3877, '315350', 'ALTO JEQUITIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4139, '210547', 'JENIPAPO DOS VIEIRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4110, '210207', 'BOM LUGAR', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5462, '510120', 'ARAGUAINHA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (916, '420315', 'CALMON', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (110, '230570', 'IPAUMIRIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2332, '280680', 'SAO DOMINGOS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4092, '210015', 'AGUA DOCE DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5096, '150110', 'BAGRE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2979, '293020', 'SENTO SE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4827, '290630', 'CANAVIEIRAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4832, '290910', 'CORIBE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5559, '521250', 'LUZIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5276, '520450', 'CALDAS NOVAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5279, '520470', 'CAMPINORTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1024, '292210', 'MUNDO NOVO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1394, '314450', 'NAZARENO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5487, '522200', 'VIANOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3970, '310445', 'ARICANDUVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2175, '313480', 'JACUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3988, '310810', 'BONFIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1502, '313950', 'MANHUMIRIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1607, '315057', 'PINTOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5304, '520990', 'IACIARA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4349, '520510', 'CATALAO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1772, '317070', 'VARGINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4551, '500793', 'SONORA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3959, '310230', 'ALVINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5460, '500795', 'TACURU', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1273, '410100', 'AMPERE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (874, '412620', 'SAPOPEMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1895, '250660', 'IBIARA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1203, '240100', 'APODI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1611, '315160', 'PLANURA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (291, '292430', 'PIATA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1790, '320150', 'COLATINA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2972, '292925', 'SAO GABRIEL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3285, '430570', 'CONDOR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (309, '211153', 'SAO PEDRO DA AGUA BRANCA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4658, '230820', 'MERUOCA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4742, '431370', 'PALMEIRA DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1063, '251160', 'PILOES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2955, '292630', 'RIACHAO DO JACUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3730, '431862', 'SAO JOSE DOS AUSENTES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1423, '354040', 'POPULINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3717, '431730', 'SANTA VITORIA DO PALMAR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1938, '260392', 'CARNAUBEIRA DA PENHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1735, '313190', 'ITABIRITO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3661, '430180', 'BARRACAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3223, '420415', 'CELSO RAMOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2022, '520810', 'FORMOSO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4405, '430495', 'CASEIROS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1107, '270450', 'MARAGOGI', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (794, '320020', 'ALEGRE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2085, '432143', 'TERRA DE AREIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3947, '130185', 'IRANDUBA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2304, '412150', 'REBOUCAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (90, '230300', 'CARIDADE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (725, '150178', 'BREU BRANCO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4634, '221065', 'SIGEFREDO PACHECO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4812, '280630', 'SANTA LUZIA DO ITANHY', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3933, '110160', 'THEOBROMA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2875, '210570', 'LAGO DA PEDRA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4506, '311650', 'CLARO DOS POCOES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3831, '350840', 'CABREUVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4661, '230950', 'OROS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2922, '290880', 'CONTENDAS DO SINCORA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1871, '292593', 'QUIXABEIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (820, '350460', 'BADY BASSITT', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1649, '231270', 'SENADOR POMPEU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3167, '316570', 'SENADOR FIRMINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1884, '171800', 'PORTO ALEGRE DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5518, '355010', 'SAO MANUEL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3194, '411080', 'IRETAMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2740, '355220', 'SOROCABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5582, '315750', 'SANTA EFIGENIA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (853, '351790', 'GUARACI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2132, '210890', 'POCAO DE PEDRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1190, '231300', 'SOLONOPOLE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2047, '261130', 'POMBOS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (157, '353350', 'NOVO HORIZONTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3726, '431843', 'SAO JOAO DO POLESINE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3042, '260390', 'CARNAIBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2439, '312510', 'EXTREMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (910, '420240', 'BLUMENAU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1280, '410310', 'BOCAIUVA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3469, '351340', 'CRUZEIRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2869, '171430', 'NAZARE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4058, '241300', 'SAO VICENTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2030, '120038', 'PLACIDO DE CASTRO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3308, '421985', 'ZORTEA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4178, '171855', 'RIACHINHO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2342, '310900', 'BRUMADINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1403, '314570', 'OLIVEIRA FORTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4866, '292670', 'RIO DE CONTAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4805, '280390', 'MALHADOR', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1919, '251440', 'SAO JOSE DE ESPINHARAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (279, '260650', 'IATI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4447, '220545', 'JOCA MARQUES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (219, '220415', 'FRANCISCO MACEDO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1816, '280310', 'ITABI', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5072, '316590', 'SENADOR MODESTINO GONCALVES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3387, '314620', 'OURO VERDE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (782, '312900', 'GUIRICEMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1636, '315600', 'RIO VERMELHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3968, '310410', 'ARCEBURGO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2430, '312180', 'DIONISIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4922, '500430', 'IGUATEMI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4913, '500230', 'BRASILANDIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1291, '410610', 'CONSELHEIRO MAIRINCK', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2271, '411690', 'NOVA ESPERANCA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2067, '410657', 'CRUZEIRO DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2783, '410270', 'BARRA DO JACARE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5233, '261250', 'SANTA CRUZ DO CAPIBARIBE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (970, '291290', 'IBIRATAIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4322, '510685', 'PORTO ESTRELA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3036, '260280', 'BUIQUE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2083, '431645', 'SALTO DO JACUI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3503, '351920', 'IACRI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3434, '170610', 'CRISTALANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4738, '431330', 'NOVA PRATA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3288, '320465', 'SAO DOMINGOS DO NORTE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1133, '354130', 'PRESIDENTE EPITACIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2625, '171570', 'PALMEIRANTE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1157, '354550', 'SANDOVALINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2154, '230945', 'OCARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1805, '320335', 'MARILANDIA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3740, '431970', 'SAO VALENTIM', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1511, '314535', 'NOVO ORIENTE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3829, '350770', 'BRAUNA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1832, '290350', 'BELO CAMPO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1428, '354120', 'PRESIDENTE BERNARDES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3533, '352520', 'JARINU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4382, '521810', 'PORTELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (825, '350660', 'BIRITIBA-MIRIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5501, '314800', 'PATOS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4127, '210410', 'FORTALEZA DOS NOGUEIRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5531, '355490', 'TRES FRONTEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2052, '290220', 'ARAMARI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1279, '410275', 'BELA VISTA DA CAROBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2034, '220280', 'CONCEICAO DO CANINDE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3114, '352380', 'ITOBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (627, '290310', 'BARRA DO ROCHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (472, '420010', 'ABELARDO LUZ', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2377, '260890', 'LIMOEIRO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1675, '351720', 'GUAICARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (648, '110008', 'COSTA MARQUES', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3394, '140040', 'NORMANDIA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (35, '220780', 'PAULISTANA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2603, '170625', 'CRIXAS DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2307, '412210', 'RIO BOM', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4223, '210975', 'SANTA FILOMENA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (885, '412788', 'TUNAS DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4969, '510490', 'JANGADA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3299, '421860', 'TROMBUDO CENTRAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4517, '432240', 'URUGUAIANA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4105, '210150', 'BARAO DE GRAJAU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3451, '350980', 'CAMPOS NOVOS PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4670, '231355', 'TURURU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5327, '521450', 'NEROPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2214, '330530', 'SAO SEBASTIAO DO ALTO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (730, '150277', 'CURIONOPOLIS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1806, '320350', 'MONTANHA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1775, '317103', 'VERDELANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4970, '510500', 'JAURU', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3761, '432183', 'TRES FORQUILHAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4638, '230015', 'ACARAPE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1631, '315500', 'RIO DOCE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1004, '291900', 'LAJEDINHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4910, '500100', 'APARECIDA DO TABOADO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3001, '293360', 'XIQUE-XIQUE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (530, '250053', 'ALCANTIL', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1757, '316820', 'TAPIRAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2932, '291580', 'ITAMBE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (777, '311940', 'CORONEL FABRICIANO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3422, '160040', 'MAZAGAO', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4804, '280340', 'JAPOATA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2834, '270270', 'FELIZ DESERTO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2061, '320115', 'BREJETUBA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4335, '520017', 'AGUA FRIA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4437, '220325', 'CURRALINHOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4419, '220010', 'AGRICOLANDIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5319, '521280', 'MARA ROSA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4846, '291770', 'JAGUARARI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4839, '291320', 'IBOTIRAMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (695, '130250', 'MANACAPURU', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1236, '310890', 'BRASOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1238, '311060', 'CAMBUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3884, '315640', 'ROMARIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (771, '311200', 'CANDEIAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2176, '313510', 'JANAUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (404, '311050', 'CAMANDUCAIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4875, '293090', 'TABOCAS DO BREJO VELHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4075, '250360', 'CAICARA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3123, '410530', 'CEU AZUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2509, '250460', 'CONDE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1277, '410210', 'ASTORGA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3208, '412070', 'QUATIGUA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3182, '410190', 'ASSAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2936, '291930', 'LENCOIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3041, '260380', 'CAPOEIRAS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2813, '410685', 'CRUZMALTINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2961, '292750', 'SANTA BARBARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4481, '311170', 'CANAA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4461, '220810', 'PIMENTEIRAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4754, '431532', 'QUEVEDOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3753, '432100', 'TAPERA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5328, '521480', 'NOVA AURORA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (484, '240720', 'MACAU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (58, '221035', 'SAO LOURENCO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5523, '355230', 'SUD MENNUCCI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4007, '421200', 'PALMA SOLA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3231, '420675', 'IBIAM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1192, '231325', 'TARRAFAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2336, '291620', 'ITAPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (913, '420260', 'BOM RETIRO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (174, '353680', 'PEDRA BELA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4113, '210237', 'CACHOEIRA GRANDE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4383, '521839', 'PROFESSOR JAMIL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (142, '353120', 'MONTE ALEGRE DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2733, '355080', 'SAO SEBASTIAO DA GRAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3768, '353150', 'MONTE AZUL PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1447, '354490', 'SALES OLIVEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3918, '352260', 'ITAPIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1178, '231095', 'PIRES FERREIRA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (184, '353880', 'PIRAJU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4140, '210550', 'JOAO LISBOA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1808, '320390', 'NOVA VENECIA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2535, '310740', 'BOM DESPACHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3420, '160015', 'PEDRA BRANCA DO AMAPARI', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2613, '171090', 'ITAPIRATINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (888, '412830', 'UNIFLOR', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1135, '354150', 'PRESIDENTE VENCESLAU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1431, '354190', 'QUELUZ', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4407, '430515', 'CERRO GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4523, '432180', 'TRES DE MAIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2598, '170389', 'CARRASCO BONITO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1062, '251130', 'PIANCO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (224, '220775', 'PASSAGEM FRANCA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3476, '351480', 'ELDORADO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4916, '500310', 'CORGUINHO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (477, '240570', 'JARDIM DO SERIDO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3628, '421390', 'PRESIDENTE CASTELLO BRANCO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2174, '313400', 'ITINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4124, '210400', 'ESPERANTINOPOLIS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1224, '240480', 'IPUEIRA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3020, '251710', 'VARZEA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2879, '211050', 'SAO BENTO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2007, '410965', 'HONORIO SERPA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2248, '411325', 'LARANJAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (476, '240550', 'JARDIM DE ANGICOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1391, '314420', 'NACIP RAYDAN', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5070, '316557', 'SENADOR AMARAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2604, '170650', 'DARCINOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2561, '150715', 'SAO DOMINGOS DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4888, '310200', 'ALTEROSA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4978, '510619', 'NOVA SANTA HELENA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5477, '520650', 'CROMINIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5516, '354920', 'SAO JOAO DAS DUAS PONTES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3550, '352890', 'MARIAPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2689, '350310', 'ARANDU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4802, '280260', 'GRACHO CARDOSO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1179, '231100', 'PORANGA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5201, '522045', 'SENADOR CANEDO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5273, '520400', 'CABECEIRAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (634, '290460', 'BRUMADO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1534, '316140', 'SAO FRANCISCO DO GLORIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3007, '310100', 'AGUAS VERMELHAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1526, '315650', 'RUBELITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (612, '290080', 'ALCOBACA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1067, '251230', 'PRINCESA ISABEL', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4938, '500750', 'ROCHEDO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4561, '510325', 'COLNIZA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5171, '510706', 'QUERENCIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4450, '220560', 'LANDRI SALES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2365, '250855', 'LOGRADOURO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1278, '410250', 'BARBOSA FERRAZ', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1328, '411605', 'MISSAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3013, '410770', 'FENIX', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1931, '260110', 'ARARIPINA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3139, '430920', 'GRAVATAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3621, '421190', 'PALHOCA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4153, '210725', 'NOVA COLINAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3378, '314345', 'MONTEZUMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2163, '312790', 'GRUPIARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2581, '160053', 'PORTO GRANDE', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (34, '220777', 'PATOS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (303, '211105', 'SAO JOAO DO PARAISO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (166, '353560', 'PARAIBUNA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (676, '120043', 'SANTA ROSA DO PURUS', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1461, '355110', 'SARAPUI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4647, '230393', 'CHORO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1621, '315310', 'PRESIDENTE BERNARDES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4918, '500325', 'COSTA RICA', 16);
-- Seed-data fragment: rows for table base_municipio (Brazilian municipalities).
-- Columns, as used by every statement in this chunk:
--   id        - integer primary-key value, supplied explicitly (not auto-generated here)
--   codigo    - 6-digit municipality code stored as text; within the visible rows its
--               two-digit prefix correlates with estado_id (looks like an IBGE code —
--               NOTE(review): confirm against the table schema / data source)
--   nome      - municipality name, uppercase, accents stripped; apostrophes are
--               escaped with the standard SQL doubled quote (e.g. OLHO D''AGUA ...)
--   estado_id - presumably a foreign key into a state table ("estado") — the
--               referenced table is not visible in this fragment; verify schema.
-- The rows are unordered and machine-generated; this chunk is part of a longer
-- dump that continues before and after these lines. Do not hand-edit values.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4515, '432234', 'UBIRETAMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4937, '500730', 'RIO NEGRO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4781, '432085', 'TABAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3318, '430090', 'ARATIBA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3742, '431980', 'SAO VICENTE DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3745, '432026', 'SEGREDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3098, '261470', 'TACAIMBO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4274, '312480', 'ESTRELA DO SUL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1079, '251365', 'SANTAREM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3527, '352410', 'ITUVERAVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3090, '261290', 'SAO BENEDITO DO SUL', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3261, '421680', 'SAO JOSE DO CERRITO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3478, '351492', 'ELISIARIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4777, '432023', 'SEDE NOVA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4899, '310665', 'BERIZAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3470, '351360', 'CUNHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3664, '430222', 'BOA VISTA DO CADEADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5278, '520460', 'CAMPESTRE DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3262, '421760', 'SIDEROPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1170, '354770', 'SANTO ANASTACIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4043, '421710', 'SAO MARTINHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2488, '220720', 'PADRE MARCOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3234, '420780', 'IRANI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (587, '280330', 'JAPARATUBA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3883, '315610', 'RITAPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1605, '315020', 'PIEDADE DE PONTE NOVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (156, '353340', 'NOVA ODESSA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (292, '293250', 'UNA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3228, '420580', 'GARUVA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2664, '330513', 'SAO JOSE DE UBA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4420, '220030', 'ALTO LONGA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (374, '220520', 'JAICOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1877, '292937', 'SAO JOSE DO JACUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1072, '251276', 'RIACHAO DO POCO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5308, '521056', 'ITAGUARI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3993, '421010', 'MAFRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3184, '410304', 'BOA VENTURA DE SAO ROQUE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3524, '352350', 'ITATINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2121, '170220', 'ARAGUATINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (923, '420410', 'CAXAMBU DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4375, '521470', 'NOVA AMERICA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1854, '291630', 'ITAPEBI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5280, '520485', 'CAMPO LIMPO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1554, '320400', 'PANCAS', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1319, '411375', 'LUNARDELLI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3416, '150746', 'SAO JOAO DA PONTA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2574, '150815', 'URUARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (811, '350090', 'ALTAIR', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5412, '431230', 'MIRAGUAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1419, '353890', 'PIRAJUI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (307, '211130', 'SAO LUIS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3297, '421810', 'TIMBE DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1191, '231320', 'TAMBORIL', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (829, '350810', 'BURITAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5410, '431210', 'MATA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4618, '293076', 'SITIO DO QUINTO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5498, '313960', 'MANTENA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5010, '313890', 'MACHACALIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3382, '314480', 'NOVA LIMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1542, '316740', 'SILVIANOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5049, '316260', 'SAO JOAO DO ORIENTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4248, '312020', 'CRISTAIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1513, '314625', 'PADRE CARVALHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2337, '291955', 'LUIS EDUARDO MAGALHAES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4791, '510890', 'NOVA MARINGA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (992, '291730', 'ITUBERA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5169, '510700', 'POXOREO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4332, '510840', 'VARZEA GRANDE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2298, '412085', 'QUATRO PONTES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4547, '500570', 'NAVIRAI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2300, '412100', 'QUERENCIA DO NORTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1274, '410120', 'ANTONINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3407, '150405', 'MAE DO RIO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (195, '354105', 'PRATANIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3486, '351590', 'FLOREAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3552, '352930', 'MATAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3185, '410360', 'CAMBARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2246, '411300', 'JUSSARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1327, '411585', 'MERCEDES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4090, '172208', 'WANDERLANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5312, '521160', 'IVOLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3613, '421000', 'LUIZ ALVES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2772, '410080', 'ALVORADA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4109, '210203', 'BOM JESUS DAS SELVAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5438, '430597', 'COXILHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2293, '412000', 'PORECATU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2551, '150580', 'PORTEL', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3725, '431842', 'SAO JOAO DA URTIGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1333, '411729', 'NOVO ITACOLOMI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3289, '320490', 'SAO MATEUS', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3744, '432020', 'SEBERI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4107, '210190', 'BEQUIMAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4701, '430710', 'HERVAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1861, '291990', 'MACURURE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4379, '521565', 'PALESTINA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4630, '220990', 'SAO JOAO DA SERRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3693, '431477', 'PONTAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2383, '270130', 'CAJUEIRO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2716, '350780', 'BRODOWSKI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (403, '310945', 'CABECEIRA GRANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3778, '353550', 'PARAGUACU PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5132, '431267', 'NICOLAU VERGUEIRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (262, '250720', 'ITATUBA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4084, '172049', 'SAO VALERIO DA NATIVIDADE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2700, '350520', 'BARIRI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1503, '314030', 'MARLIERIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4253, '312100', 'DATAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2371, '251620', 'SOUSA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4001, '421140', 'NOVA ERECHIM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (318, '211240', 'TURIACU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1878, '293070', 'SIMOES FILHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4475, '311080', 'CAMPANARIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3671, '270330', 'INHAPI', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1443, '354400', 'RIO DAS PEDRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5068, '316550', 'SARDOA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (192, '354050', 'PORANGABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3357, '500390', 'FIGUEIRAO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (894, '420005', 'ABDON BATISTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1708, '312707', 'FRUTA DE LEITE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2694, '350390', 'ARUJA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5063, '316460', 'SAO SEBASTIAO DO OESTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1912, '251272', 'PEDRO REGIS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1910, '251200', 'POCINHOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1421, '354000', 'POMPEIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4366, '521170', 'JANDAIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3206, '411965', 'PITANGUEIRAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3423, '160080', 'VITORIA DO JARI', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3793, '210540', 'ITAPECURU MIRIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5071, '316560', 'SENADOR CORTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1483, '312650', 'FRANCISCO BADARO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (965, '291180', 'GUARATINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1976, '431120', 'JULIO DE CASTILHOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (659, '110094', 'CUJUBIM', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3455, '351030', 'CAPELA DO ALTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2594, '170300', 'BABACULANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3836, '351050', 'CARAGUATATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1670, '351220', 'CONCHAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3005, '310070', 'AGUA COMPRIDA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2483, '220213', 'CAMPO GRANDE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3118, '354325', 'RIBEIRAO GRANDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1466, '355290', 'TACIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4473, '311000', 'CAETE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (46, '220910', 'SANTA CRUZ DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2101, '150200', 'CACHOEIRA DO ARARI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3397, '150095', 'AURORA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (310, '211163', 'SAO RAIMUNDO DO DOCA BEZERRA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5286, '520570', 'CORREGO DO OURO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2135, '211290', 'VITORIA DO MEARIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2475, '210340', 'COELHO NETO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3975, '310540', 'BARAO DE COCAIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1413, '314730', 'PARAISOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3104, '313505', 'JAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1736, '313220', 'ITAGUARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5300, '520910', 'GOIATUBA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5450, '500210', 'BELA VISTA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4330, '510800', 'TAPURAH', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1080, '251370', 'SANTA RITA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1316, '411280', 'JOAQUIM TAVORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1916, '251350', 'SANTANA DE MANGUEIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2065, '410160', 'ARAPOTI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2070, '411510', 'MARILUZ', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2270, '411670', 'NOVA AURORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (126, '230860', 'MONSENHOR TABOSA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3379, '314360', 'MORRO DA GARCA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3204, '411790', 'PALOTINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2413, '311350', 'CARBONITA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5454, '500510', 'JATEI', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2247, '411320', 'LAPA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1286, '410445', 'CANTAGALO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5295, '520790', 'FLORES DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4216, '210740', 'OLHO D''AGUA DAS CUNHAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2827, '270150', 'CAMPO GRANDE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4161, '210840', 'PERI MIRIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5119, '430925', 'GUABIJU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4412, '430560', 'COLORADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1115, '270580', 'OLHO D''AGUA DO CASADO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3290, '320500', 'SERRA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3640, '421720', 'SAO MIGUEL DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4000, '421130', 'NAVEGANTES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1789, '320130', 'CARIACICA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5092, '110146', 'PIMENTEIRAS DO OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (833, '350990', 'CANANEIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (340, '220196', 'BRASILEIRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5373, '430840', 'FORMIGUEIRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3457, '351070', 'CARDOSO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4042, '421690', 'SAO LOURENCO DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (543, '250250', 'BOQUEIRAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (533, '250080', 'ARACAGI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3051, '260560', 'FLORES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5261, '520120', 'ANHANGUERA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2106, '150480', 'MONTE ALEGRE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2500, '240080', 'ANGICOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2179, '316870', 'TIMOTEO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1139, '354220', 'RANCHARIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3637, '421630', 'SAO JOAO BATISTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4628, '220960', 'SAO FELIX DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (617, '290170', 'ANTONIO CARDOSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2197, '320450', 'SANTA LEOPOLDINA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (585, '280280', 'INDIAROBA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5330, '521490', 'NOVA ROMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2139, '220559', 'LAGOA DO SITIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3855, '412853', 'VENTANIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3427, '170230', 'ARAPOEMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5203, '522100', 'TAQUARAL DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2876, '210830', 'PENALVA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1706, '420960', 'LAURO MULLER', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4006, '421185', 'OURO VERDE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2718, '350830', 'CABRALIA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2568, '150790', 'SOURE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1233, '310500', 'BALDIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4289, '352044', 'ILHA SOLTEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3574, '352810', 'MACAUBAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3672, '430400', 'CAMPO NOVO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1230, '310220', 'ALVARENGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2624, '171525', 'NOVO JARDIM', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1470, '355395', 'TARUMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3461, '351160', 'CESARIO LANGE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2638, '330120', 'CARMO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1408, '314655', 'PAI PEDRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (342, '220202', 'BURITI DOS MONTES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4188, '210050', 'ALTO PARNAIBA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4266, '312340', 'DORESOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3063, '260790', 'JABOATAO DOS GUARARAPES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5103, '171620', 'PARANA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4300, '352710', 'LINS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2562, '150720', 'SAO DOMINGOS DO CAPIM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2739, '355200', 'SILVEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5348, '521860', 'RIALMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5346, '521830', 'POSSE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2926, '291165', 'GUAJERU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2428, '312110', 'DELFIM MOREIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5579, '315100', 'PIRANGUINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1712, '312750', 'GONZAGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (75, '230070', 'ALTO SANTO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5505, '316210', 'SAO GOTARDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1991, '150150', 'BENEVIDES', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5166, '510670', 'PONTE BRANCA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4790, '510880', 'NOVA GUARITA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (569, '250700', 'ITAPORANGA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4438, '220335', 'DIRCEU ARCOVERDE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1289, '410550', 'CIANORTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1140, '354230', 'REDENCAO DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1860, '291940', 'LICINIO DE ALMEIDA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2306, '412190', 'RIBEIRAO DO PINHAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2206, '330225', 'ITATIAIA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (161, '353460', 'OSVALDO CRUZ', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2639, '330140', 'CONCEICAO DE MACABU', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3012, '410760', 'FAXINAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4902, '432360', 'VISTA ALEGRE DO PRATA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (720, '150100', 'AVEIRO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1700, '420880', 'JAGUARUNA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2432, '312247', 'DOM BOSCO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2069, '411120', 'ITAPEJARA D''OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1435, '354280', 'RIBEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (100, '230440', 'FORTALEZA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1342, '412010', 'PORTO AMAZONAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4659, '230837', 'MIRAIMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2941, '230880', 'MORAUJO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4177, '211060', 'SAO BERNARDO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3980, '310650', 'BERILO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (96, '230426', 'DEPUTADO IRAPUAN PINHEIRO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3183, '410300', 'BOA ESPERANCA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (389, '220640', 'MONSENHOR GIL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3614, '421020', 'MAJOR GERCINO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5089, '110003', 'CABIXI', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2109, '150565', 'PLACAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4414, '430585', 'COQUEIROS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3296, '421800', 'TIJUCAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1973, '261630', 'VICENCIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4936, '500720', 'RIO BRILHANTE', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4423, '220115', 'BAIXA GRANDE DO RIBEIRO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4581, '316340', 'SAO JOSE DO GOIABAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4234, '311783', 'CONEGO MARINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3127, '412065', 'QUARTO CENTENARIO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3133, '421187', 'PAIAL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4457, '220750', 'PALMEIRAIS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5065, '316490', 'SAO SEBASTIAO DO RIO VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (87, '230260', 'CAMOCIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (928, '420440', 'CORONEL FREITAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4734, '431265', 'NAO-ME-TOQUE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3848, '351810', 'GUARANTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1040, '250730', 'JACARAU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2301, '412125', 'RAMILANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2082, '431450', 'PINHEIRO MACHADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (165, '353540', 'PANORAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5366, '430750', 'ESPUMOSO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4051, '241060', 'RAFAEL GODEIRO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4896, '310520', 'BANDEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1275, '410165', 'ARAPUA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1439, '354330', 'RIBEIRAO PIRES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4225, '211065', 'SAO DOMINGOS DO AZEITAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3562, '352230', 'ITAPETININGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3069, '260875', 'LAGOA GRANDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1245, '311380', 'CARMESIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1161, '354630', 'SANTA CRUZ DAS PALMEIRAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2523, '261310', 'SAO CAITANO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3163, '315840', 'SANTANA DE CATAGUASES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (843, '351430', 'DOURADO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3120, '355350', 'TAPIRAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (48, '220937', 'SANTA ROSA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1703, '420917', 'JUPIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3131, '420530', 'FAXINAL DOS GUEDES', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (519, '241370', 'SITIO NOVO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4091, '172210', 'XAMBIOA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3022, '260005', 'ABREU E LIMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1348, '412160', 'RENASCENCA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2002, '171880', 'SAMPAIO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4208, '210560', 'JOSELANDIA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4392, '430360', 'CAMBARA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2134, '211160', 'SAO RAIMUNDO DAS MANGABEIRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4224, '211027', 'SANTO AMARO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (431, '352330', 'ITARIRI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5303, '520970', 'HIDROLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4469, '310950', 'CABO VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2023, '521375', 'MONTIVIDIU', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3383, '314510', 'NOVA RESENDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (981, '291480', 'ITABUNA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5173, '510724', 'SANTA CARMEM', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4845, '291710', 'ITORORO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (613, '290100', 'AMARGOSA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2811, '410660', 'CRUZEIRO DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2812, '410670', 'CRUZEIRO DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2252, '411390', 'MALLET', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2232, '411090', 'ITAGUAJE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5088, '353820', 'PINHALZINHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4353, '520640', 'CRIXAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2063, '330260', 'MANGARATIBA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1460, '355050', 'SAO PEDRO DO TURVO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2653, '330350', 'NOVA IGUACU', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (760, '292740', 'SALVADOR', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2087, '314100', 'MATO VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1536, '316230', 'SAO JOAO DA MATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3375, '314225', 'MIRAVANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2511, '250820', 'LAGOA DE DENTRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3108, '330227', 'JAPERI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2831, '270220', 'COQUEIRO SECO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (392, '220675', 'NOSSA SENHORA DE NAZARE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5442, '430650', 'DOM FELICIANO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2754, '355480', 'TREMEMBE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (750, '290690', 'CARAVELAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3777, '353500', 'PALESTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4516, '432237', 'UNISTALDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2728, '354995', 'SAO LOURENCO DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4100, '210100', 'ARARI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3424, '170035', 'ALIANCA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1559, '110028', 'ROLIM DE MOURA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2485, '220370', 'ESPERANTINA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3711, '431673', 'SANTA CECILIA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2884, '230445', 'FORTIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (752, '290760', 'CENTRAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2823, '270070', 'BATALHA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (737, '150320', 'IGARAPE-ACU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (577, '280130', 'CAPELA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3842, '351410', 'DOIS CORREGOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (854, '351840', 'GUARATINGUETA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5514, '354840', 'SANTOPOLIS DO AGUAPEI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5191, '521970', 'SANTA TEREZINHA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1666, '350570', 'BARUERI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4732, '431244', 'MORRINHOS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2297, '412060', 'PRUDENTOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3380, '314435', 'NAQUE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (622, '290240', 'AURELINO LEAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1248, '311600', 'CHALE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3401, '150215', 'CANAA DOS CARAJAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4307, '353140', 'MONTE APRAZIVEL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3112, '351290', 'COSMORAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1489, '313055', 'IMBE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5265, '520180', 'ARAGOIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3611, '420945', 'LAJEADO GRANDE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (596, '280490', 'PACATUBA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3294, '421780', 'TAIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2586, '170100', 'ANANAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3286, '261320', 'SAO JOAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4368, '521230', 'LEOPOLDO DE BULHOES', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (11, '510285', 'CASTANHEIRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1164, '354670', 'SANTA GERTRUDES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1749, '313460', 'JABOTICATUBAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2449, '270255', 'ESTRELA DE ALAGOAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1159, '354610', 'SANTA CLARA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2597, '170386', 'CARIRI DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1732, '313130', 'IPATINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2043, '240520', 'JANDUIS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2559, '150660', 'SANTA MARIA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3641, '421740', 'SCHROEDER', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3301, '421885', 'UNIAO DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5488, '430510', 'CAXIAS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3561, '352200', 'ITAJU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3179, '420090', 'ANGELINA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4365, '521130', 'ITARUMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2731, '355030', 'SAO PAULO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3164, '316270', 'SAO JOAO DO PARAISO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4783, '432132', 'TAQUARUCU DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2954, '292620', 'RIACHAO DAS NEVES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2769, '410030', 'AGUDOS DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5395, '431075', 'IVORA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4606, '250680', 'INGA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (390, '220665', 'MORRO CABECA NO TEMPO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1220, '240400', 'FRUTUOSO GOMES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1028, '292280', 'NOVA ITARANA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1009, '291970', 'MACARANI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5268, '520320', 'BARRO ALTO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5558, '521000', 'INHUMAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1742, '313320', 'ITANHOMI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (230, '230270', 'CAMPOS SALES', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4166, '210920', 'PRESIDENTE JUSCELINO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4268, '312370', 'ENGENHEIRO CALDAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4998, '313710', 'LAGAMAR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (675, '120040', 'RIO BRANCO', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3969, '310420', 'ARCOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4546, '500525', 'LAGUNA CARAPA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3362, '313760', 'LAGOA SANTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4541, '500315', 'CORONEL SAPUCAIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2948, '292530', 'PORTO SEGURO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (620, '290210', 'ARACI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1276, '410185', 'ARIRANHA DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2795, '410400', 'CAMPINA GRANDE DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3577, '420080', 'ANCHIETA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4278, '312560', 'FELISBURGO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3874, '315213', 'PONTO CHIQUE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5043, '316180', 'SAO GONCALO DO PARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2797, '410425', 'CAMPO MAGRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3700, '431540', 'REDENTORA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2017, '432220', 'TUPANCIRETA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3563, '352265', 'ITAPIRAPUA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2768, '410020', 'ADRIANOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3323, '430170', 'BARAO DE COTEGIPE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3751, '432070', 'SOBRADINHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1622, '315330', 'PRESIDENTE KUBITSCHEK', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (670, '120017', 'CAPIXABA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4817, '290130', 'ANDARAI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5188, '521940', 'SANTA RITA DO ARAGUAIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (180, '353810', 'PINDORAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (535, '250130', 'AROEIRAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5352, '430630', 'DAVID CANABARRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4594, '210235', 'BURITIRANA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3755, '432135', 'TAVARES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1802, '320320', 'LINHARES', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3395, '140060', 'SAO LUIZ', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4702, '430740', 'ESMERALDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1676, '351960', 'IBITINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2767, '355720', 'CHAVANTES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2741, '355240', 'SUMARE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4507, '311670', 'COIMBRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1655, '330270', 'MARICA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2662, '330480', 'SAO FIDELIS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4990, '150430', 'MARACANA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (952, '290990', 'CURACA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3821, '350410', 'ATIBAIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2575, '150830', 'VISEU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5285, '520549', 'CIDADE OCIDENTAL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3759, '432163', 'TRES ARROIOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4136, '210530', 'IMPERATRIZ', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (153, '353300', 'NOVA GRANADA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1231, '310250', 'AMPARO DO SERRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4285, '312680', 'FREI GASPAR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1474, '355540', 'UBATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5461, '510010', 'ACORIZAL', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5474, '520235', 'ARENOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (525, '241460', 'UPANEMA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3654, '430060', 'ALVORADA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4197, '210260', 'CANDIDO MENDES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2569, '150795', 'TAILANDIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1550, '320220', 'FUNDAO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4198, '210280', 'CAROLINA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4472, '310990', 'CAETANOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (959, '291080', 'FEIRA DE SANTANA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4765, '431790', 'SANTO CRISTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1791, '320160', 'CONCEICAO DA BARRA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2095, '110170', 'URUPA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (926, '420430', 'CONCORDIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (897, '420055', 'AGUAS FRIAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1193, '231335', 'TEJUCUOCA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (846, '351512', 'EMILIANOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3746, '432040', 'SERAFINA CORREA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (102, '230465', 'GRACA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3230, '420665', 'GUATAMBU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5155, '432032', 'SENADOR SALGADO FILHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3624, '421250', 'PENHA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (281, '260795', 'JAQUEIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5459, '500770', 'SETE QUEDAS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5281, '520500', 'CARMO DO RIO VERDE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2508, '250215', 'BOA VISTA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (918, '420330', 'CAMPO ALEGRE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5478, '520753', 'FAINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1740, '313280', 'ITAMBE DO MATO DENTRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1037, '292370', 'PARATINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1145, '354340', 'RIBEIRAO PRETO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5270, '520380', 'BRITANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5336, '521570', 'PALMEIRAS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4357, '520815', 'GAMELEIRA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3946, '130150', 'ENVIRA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1514, '314830', 'PAULA CANDIDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1385, '314300', 'MONTE BELO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2530, '290520', 'CAETITE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3911, '312970', 'IBIRACI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3146, '500345', 'DEODAPOLIS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4345, '520410', 'CACHOEIRA ALTA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4946, '510060', 'ALTO TAQUARI', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1589, '150540', 'OUREM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (19, '510720', 'RIO BRANCO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4569, '510550', 'VILA BELA DA SANTISSIMA TRINDADE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4867, '292710', 'RODELAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4540, '500280', 'CARACOL', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1267, '352800', 'MACATUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3242, '421100', 'MONDAI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2469, '171070', 'ITAGUATINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (649, '110012', 'JI-PARANA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5385, '430975', 'IBARAMA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3480, '351518', 'ESPIRITO SANTO DO PINHAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5055, '316330', 'SAO JOSE DO DIVINO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4710, '430910', 'GRAMADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3515, '352150', 'IRAPUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5034, '316060', 'SANTO HIPOLITO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5407, '431180', 'MARAU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (131, '230970', 'PACATUBA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2828, '270160', 'CANAPI', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1746, '313410', 'ITUETA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (177, '353740', 'PEREIRA BARRETO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5123, '431050', 'IRAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4212, '210640', 'MATA ROMA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3847, '351730', 'GUAIMBE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2160, '312705', 'FRONTEIRA DOS VALES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3211, '412430', 'SANTO ANTONIO DO PARAISO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2726, '354970', 'SAO JOSE DO RIO PARDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5574, '314130', 'MEDEIROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4381, '521680', 'PETROLINA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5120, '430980', 'IBIACA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3739, '431950', 'SAO SEBASTIAO DO CAI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1008, '291960', 'MACAJUBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3410, '150548', 'PACAJA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (625, '290290', 'BARRA DO CHOCA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1, '240810', 'Natal', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2841, '521460', 'NIQUELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4583, '316447', 'SAO SEBASTIAO DO ANTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1985, '130200', 'ITAPIRANGA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5399, '431123', 'LAGOA BONITA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2916, '290370', 'BOA NOVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4911, '500150', 'BANDEIRANTES', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3337, '430280', 'CACAPAVA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3809, '320060', 'ARACRUZ', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4833, '291000', 'DARIO MEIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1637, '315620', 'ROCHEDO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4903, '432370', 'VISTA GAUCHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1646, '421750', 'SEARA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5428, '430462', 'CAPAO BONITO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2889, '231330', 'TAUA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3266, '421875', 'TUNAPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4874, '293075', 'SITIO DO MATO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4966, '510390', 'GENERAL CARNEIRO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4987, '150034', 'AGUA AZUL DO NORTE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (486, '240740', 'MARTINS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3326, '430190', 'BARRA DO RIBEIRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (103, '230470', 'GRANJA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5317, '521260', 'MAIRIPOTABA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1092, '251490', 'SAO MAMEDE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3491, '351710', 'GLICERIO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5504, '315970', 'SANTA ROSA DA SERRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2148, '230425', 'CRUZ', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4275, '312500', 'EWBANK DA CAMARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5557, '520545', 'CEZARINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2320, '313970', 'MARAVILHAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5528, '355420', 'TEJUPA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4155, '210745', 'OLINDA NOVA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4709, '430885', 'GENTIL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (586, '280320', 'ITAPORANGA D''AJUDA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2194, '320310', 'JERONIMO MONTEIRO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1173, '354860', 'SAO BENTO DO SAPUCAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3850, '351900', 'HERCULANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2790, '410335', 'BRAGANEY', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1949, '260770', 'ITAPETIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4944, '510035', 'ALTO BOA VISTA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2080, '431215', 'MATO LEITAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5164, '510645', 'PLANALTO DA SERRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2947, '292520', 'POJUCA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2415, '311480', 'CARVALHOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4104, '210140', 'BALSAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1628, '315430', 'RESPLENDOR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3376, '314270', 'MONTALVANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3370, '314055', 'MATA VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4260, '312220', 'DIVINOLANDIA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (984, '291560', 'ITAMARAJU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1182, '231130', 'QUIXADA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4380, '521630', 'PARANAIGUARA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5167, '510675', 'PONTES E LACERDA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3203, '411780', 'PALMITAL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (21, '510795', 'TANGARA DA SERRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3215, '412660', 'SIQUEIRA CAMPOS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (450, '411060', 'IPORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3852, '412780', 'TOMAZINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5540, '411730', 'ORTIGUEIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2009, '411980', 'PLANALTO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2633, '330050', 'BOM JARDIM', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1836, '290560', 'CAMACAN', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3148, '510410', 'GUARANTA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1263, '312352', 'DURANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2185, '317080', 'VARZEA DA PALMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3360, '313680', 'JURAMENTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1592, '261170', 'RIACHO DAS ALMAS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5118, '430850', 'FREDERICO WESTPHALEN', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (245, '240600', 'JOSE DA PENHA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1523, '315445', 'RIACHINHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1726, '313040', 'IJACI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4415, '430593', 'CORONEL PILAR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3705, '431590', 'RODEIO BONITO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1698, '420830', 'ITAPEMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4233, '311760', 'CONCEICAO DO PARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5402, '431150', 'LAVRAS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4601, '230610', 'IRAUCUBA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4403, '430480', 'CARLOS BARBOSA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4782, '432110', 'TAPES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3954, '310160', 'ALFENAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4112, '210230', 'BURITI BRAVO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3137, '430466', 'CAPAO DO LEAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2349, '240260', 'CEARA-MIRIM', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3134, '421520', 'ROMELANDIA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3080, '261110', 'PETROLINA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2911, '280210', 'ESTANCIA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3691, '431460', 'PIRATINI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (171, '353640', 'PAULICEIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2422, '311810', 'CONGONHAS DO NORTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (91, '230320', 'CARIRIACU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5361, '430697', 'EREBANGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3002, '310020', 'ABAETE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4239, '311870', 'COQUEIRAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5059, '316410', 'SAO PEDRO DO SUACUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3727, '431845', 'SAO JOSE DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3786, '353920', 'PIRAPOZINHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4241, '311890', 'CORDISBURGO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3861, '314750', 'PASSABEM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4860, '292380', 'PARIPIRANGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (13, '510395', 'GLORIA D''OESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5148, '431770', 'SANTO ANTONIO DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3796, '290340', 'BELMONTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4773, '431936', 'SAO PEDRO DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (357, '220320', 'CURIMATA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3781, '353660', 'PAULO DE FARIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1685, '353520', 'PALMEIRA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (857, '351970', 'IBIUNA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3650, '421960', 'XAVANTINA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1693, '420768', 'IPUACU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (250, '241050', 'RAFAEL FERNANDES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4149, '210667', 'MILAGRES DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1149, '354420', 'RIOLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1831, '290323', 'BARRO ALTO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (62, '221062', 'SEBASTIAO BARROS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3119, '354810', 'SANTO ANTONIO DO JARDIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2890, '231410', 'VICOSA DO CEARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4835, '291077', 'FEIRA DA MATA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3433, '170460', 'CHAPADA DE AREIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4129, '210450', 'GOVERNADOR ARCHER', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1793, '320190', 'DOMINGOS MARTINS', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5293, '520725', 'DOVERLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5016, '315780', 'SANTA LUZIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4363, '521040', 'ITABERAI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5343, '521740', 'PIRES DO RIO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1787, '320090', 'BARRA DE SAO FRANCISCO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3944, '130083', 'CAAPIRANGA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4378, '521540', 'OURO VERDE DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2057, '292690', 'RIO DO PIRES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3960, '310260', 'ANDRADAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1381, '314230', 'MOEDA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3373, '314160', 'MERCES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2177, '313540', 'JECEABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1529, '315770', 'SANTA JULIANA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4479, '311140', 'CAMPO FLORIDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2883, '230410', 'CRATEUS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4930, '500568', 'MUNDO NOVO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (733, '150290', 'CURUCA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (555, '250430', 'CATOLE DO ROCHA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1288, '410510', 'CENTENARIO DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2792, '410345', 'CAFELANDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3016, '410790', 'FLORESTA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1302, '410890', 'GUAIRACA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (178, '353760', 'PERUIBE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3475, '351440', 'DRACENA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1811, '320430', 'PRESIDENTE KENNEDY', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4128, '210430', 'GODOFREDO VIANA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5379, '430905', 'GLORINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4731, '431237', 'MONTE ALEGRE DOS CAMPOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3687, '431420', 'PEDRO OSORIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (855, '351885', 'GUATAPARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3138, '430645', 'DOIS LAJEADOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4974, '510562', 'MIRASSOL D''OESTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2443, '270010', 'AGUA BRANCA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1042, '250770', 'JUAZEIRINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5000, '313740', 'LAGOA DOURADA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2076, '430620', 'CRUZEIRO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2850, '110002', 'ARIQUEMES', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (718, '150080', 'ANANINDEUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3498, '351850', 'GUAREI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4726, '431171', 'MACAMBARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1321, '411430', 'MANDIRITUBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2282, '411850', 'PATO BRANCO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4101, '210110', 'AXIXA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5046, '316240', 'SAO JOAO DA PONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1738, '313260', 'ITAMARATI DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (53, '220987', 'SAO JOAO DA FRONTEIRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2528, '270710', 'PIRANHAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (452, '411295', 'JURANDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3494, '351780', 'GUARACAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (693, '130220', 'JURUA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2619, '171280', 'MAURILANDIA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (85, '230205', 'BARROQUINHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4750, '431478', 'PONTE PRETA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5510, '354640', 'SANTA CRUZ DO RIO PARDO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3255, '421470', 'RIO DOS CEDROS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1017, '292100', 'MATA DE SAO JOAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2454, '270560', 'NOVO LINO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3274, '430223', 'BOA VISTA DO INCRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5483, '521410', 'MUTUNOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (841, '351350', 'CUBATAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1639, '315645', 'ROSARIO DA LIMEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4083, '172025', 'SAO SALVADOR DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4682, '240380', 'FLORANIA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1188, '231280', 'SENADOR SA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3685, '431410', 'PASSO FUNDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (668, '120001', 'ACRELANDIA', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (478, '240580', 'JOAO CAMARA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3227, '420540', 'FLORIANOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4454, '220669', 'MURICI DOS PORTELAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (148, '353230', 'NATIVIDADE DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (836, '351110', 'CATANDUVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2187, '317160', 'VIRGEM DA LAPA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4819, '290230', 'ARATUIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3106, '320080', 'BAIXO GUANDU', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (190, '354025', 'PONTALINDA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (688, '130140', 'EIRUNEPE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3437, '170765', 'FIGUEIROPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4757, '431595', 'ROLADOR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3425, '170105', 'ANGICO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3599, '420570', 'GAROPABA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (807, '330475', 'SAO FRANCISCO DE ITABAPOANA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5365, '430745', 'ESPERANCA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (665, '110148', 'SAO FELIPE D''OESTE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4397, '430430', 'CANDIDO GODOI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (775, '311800', 'CONGONHAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1241, '311210', 'CAPARAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2588, '170130', 'ARAGOMINAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4968, '510455', 'ITAUBA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3787, '353960', 'PLANALTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4202, '210420', 'FORTUNA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2710, '350690', 'BOFETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4377, '521520', 'NOVO BRASIL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4364, '521080', 'ITAJA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2949, '292550', 'PRADO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3876, '315280', 'PRATA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4993, '313652', 'JOSE GONCALVES DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3805, '313210', 'ITACARAMBI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1399, '314530', 'NOVO CRUZEIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1382, '314240', 'MOEMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5168, '510682', 'PORTO ESPERIDIAO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5448, '500025', 'ALCINOPOLIS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1929, '260050', 'AGUAS BELAS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2898, '250970', 'MONTEIRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5451, '500220', 'BONITO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3129, '412800', 'UBIRATA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3199, '411373', 'LUIZIANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1283, '410347', 'CAFEZAL DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (788, '313390', 'ITAVERAVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2838, '521200', 'JAUPACI', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3518, '352215', 'ITAOCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2292, '411995', 'PONTAL DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3762, '432185', 'TRES PALMEIRAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3083, '261153', 'QUIXABA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (954, '291010', 'DOM BASILIO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1750, '313470', 'JACINTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3659, '430155', 'AUREA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2004, '330095', 'COMENDADOR LEVY GASPARIAN', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2021, '520360', 'BRAZABRANTES', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (356, '220310', 'CRISTINO CASTRO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2104, '150410', 'MAGALHAES BARATA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2626, '320517', 'VILA VALERIO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (455, '411573', 'MATO RICO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3770, '353215', 'NANTES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5429, '430469', 'CAPITAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (574, '280050', 'AREIA BRANCA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1121, '270690', 'PILAR', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5163, '510631', 'NOVO SANTO ANTONIO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5241, '311490', 'CASA GRANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (735, '150304', 'FLORESTA DO ARAGUAIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5106, '420200', 'BALNEARIO CAMBORIU', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (428, '351230', 'CONCHAS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4262, '312250', 'DOM CAVATI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1701, '420900', 'JOACABA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1756, '316805', 'TAPARUBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3607, '420810', 'ITAIOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (432, '352670', 'LEME', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4072, '250240', 'BONITO DE SANTA FE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2272, '411695', 'NOVA ESPERANCA DO SUDOESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (167, '353580', 'PARANAPANEMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5213, '522230', 'VILA PROPICIO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3951, '130320', 'NOVO AIRAO', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (834, '351010', 'CANDIDO RODRIGUES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4593, '171650', 'PEDRO AFONSO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (892, '412865', 'VIRMOND', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4137, '210535', 'ITAIPAVA DO GRAJAU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4662, '231010', 'PALMACIA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (213, '211285', 'VILA NOVA DOS MARTIRIOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4294, '352340', 'ITATIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5208, '522155', 'TURVELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4962, '510343', 'CURVELANDIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (796, '320140', 'CASTELO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1672, '351520', 'ESTRELA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2646, '330230', 'LAJE DO MURIAE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2878, '210980', 'SANTA HELENA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3085, '261200', 'SAIRE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2595, '170310', 'BARROLANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3981, '310660', 'BERTOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (623, '290250', 'BAIANOPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3979, '310620', 'BELO HORIZONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3507, '352040', 'ILHABELA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3602, '420670', 'HERVAL D''OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5087, '353770', 'PIACATU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (89, '230290', 'CAPISTRANO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4807, '280445', 'NOSSA SENHORA APARECIDA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2621, '171370', 'MONTE SANTO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4567, '510510', 'JUARA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5130, '431240', 'MONTENEGRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4416, '430595', 'COTIPORA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (781, '312770', 'GOVERNADOR VALADARES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2576, '160010', 'AMAPA', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3513, '352120', 'IPORANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4931, '500620', 'NOVA ANDRADINA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (205, '210590', 'LAGO VERDE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (17, '510628', 'NOVO SAO JOAQUIM', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4844, '291680', 'ITARANTIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1587, '520890', 'GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1748, '313440', 'ITURAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (774, '311570', 'CENTRAL DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3957, '310190', 'ALPINOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5507, '230510', 'GUARAMIRANGA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1237, '311040', 'CAMACHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (412, '313580', 'JEQUITINHONHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1762, '316905', 'TOCOS DO MOJI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1713, '312780', 'GRAO MOGOL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1613, '315200', 'POMPEU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4468, '310925', 'BUGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4236, '311790', 'CONGONHAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4958, '510320', 'COLIDER', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4787, '510830', 'UNIAO DO SUL', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (545, '250270', 'BORBOREMA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3093, '261370', 'SAO LOURENCO DA MATA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2258, '411500', 'MARILENA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1349, '412200', 'RIO AZUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2564, '150740', 'SAO FRANCISCO DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4652, '230565', 'IPAPORANGA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (209, '210850', 'PINDARE-MIRIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (376, '220530', 'JERUMENHA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1765, '316950', 'TUMIRITINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (621, '290225', 'ARATACA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (458, '411840', 'PARANAVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4291, '352170', 'ITABERA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2425, '311960', 'CORONEL PACHECO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5232, '250750', 'JOAO PESSOA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5058, '316390', 'SAO PEDRO DA UNIAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1866, '292273', 'NOVA FATIMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3075, '260980', 'OROCO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (341, '220200', 'BURITI DOS LOPES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4026, '421500', 'RIO NEGRINHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (772, '311360', 'CAREACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (974, '291350', 'IGUAI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2793, '410370', 'CAMBE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1956, '260990', 'OURICURI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4272, '312410', 'ESMERALDAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1572, '220210', 'CAMPINAS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5471, '510730', 'SAO JOSE DO RIO CLARO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4210, '210600', 'LIMA CAMPOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (373, '220510', 'ITAUEIRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4945, '510050', 'ALTO PARAGUAI', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1882, '171720', 'PIRAQUE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1932, '260140', 'BARREIROS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1481, '312470', 'ESTRELA DO INDAIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2202, '330060', 'BOM JESUS DO ITABAPOANA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3698, '431520', 'PUTINGA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (490, '240830', 'NOVA CRUZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1933, '260180', 'BETANIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5272, '520396', 'BURITINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (92, '230340', 'CARNAUBAL', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3421, '160023', 'FERREIRA GOMES', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3702, '431560', 'RIO GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (358, '220323', 'CURRAIS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5430, '430485', 'CARLOS GOMES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (808, '330570', 'SUMIDOURO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5135, '431342', 'NOVO MACHADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3328, '430200', 'BARROS CASSAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1633, '315560', 'RIO PARDO DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3998, '421085', 'MIRIM DOCE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (246, '240690', 'LUCRECIA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (273, '251615', 'SOSSEGO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5136, '431395', 'PANTANO GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2266, '411600', 'MIRASELVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3331, '430225', 'BOA VISTA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3321, '430130', 'ARROIO GRANDE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3935, '120013', 'BUJARI', 5);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2513, '251203', 'POCO DANTAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (703, '130406', 'TABATINGA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5530, '355470', 'TORRINHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2750, '355410', 'TAUBATE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1129, '270830', 'SAO JOSE DA LAJE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1913, '251278', 'RIACHO DE SANTO ANTONIO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2579, '160025', 'ITAUBAL', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3567, '352460', 'JACUPIRANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3519, '352220', 'ITAPECERICA DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3811, '330420', 'RESENDE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4016, '421330', 'PONTE ALTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2211, '330430', 'RIO BONITO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3788, '354020', 'PONTAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (194, '354090', 'PRADOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (363, '220360', 'ELISEU MARTINS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (331, '220110', 'AVELINO LOPES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3466, '351270', 'CORUMBATAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1561, '130340', 'PARINTINS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4390, '522010', 'SAO LUIS DE MONTES BELOS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5274, '520425', 'CACHOEIRA DOURADA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2939, '230600', 'IRACEMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (32, '520993', 'INACIOLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3973, '310490', 'BAEPENDI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5500, '314690', 'PAPAGAIOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (88, '230280', 'CANINDE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4985, '130190', 'ITACOATIARA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1709, '312730', 'GALILEIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4941, '500800', 'TERENOS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4565, '510420', 'GUIRATINGA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5452, '500290', 'CASSILANDIA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3054, '260600', 'GARANHUNS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3066, '260825', 'JUCATI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2295, '412035', 'PRANCHITA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1420, '353940', 'PIRATININGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3307, '421970', 'XAXIM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2191, '320225', 'GOVERNADOR LINDENBERG', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (258, '250390', 'CAMALAU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (909, '420213', 'BELA VISTA DO TOLDO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2907, '270280', 'FLEXEIRAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2350, '240375', 'FERNANDO PEDROZA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1838, '290682', 'CANUDOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (16, '510618', 'NOVA LACERDA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3880, '315480', 'RIO ACIMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5176, '510740', 'SAO PEDRO DA CIPA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1707, '312700', 'FRONTEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5435, '430550', 'CIRIACO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1369, '314010', 'MARILAC', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3188, '410630', 'CORBELIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4070, '250160', 'BARRA DE SANTA ROSA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1761, '316890', 'TIROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3984, '310700', 'BIQUINHAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5370, '430790', 'FARROUPILHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1546, '317047', 'URUANA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (444, '410500', 'CATANDUVAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1162, '354650', 'SANTA ERNESTINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2123, '170382', 'CACHOEIRINHA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2521, '261020', 'PANELAS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2162, '312740', 'GONCALVES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5149, '431795', 'SANTO EXPEDITO DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (169, '353610', 'PARDINHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4984, '110018', 'PIMENTA BUENO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3281, '430512', 'CERRITO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (741, '150370', 'ITUPIRANGA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4799, '280140', 'CARIRA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (780, '312490', 'EUGENOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1495, '313530', 'JAPARAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3330, '430220', 'BOA VISTA DO BURICA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4870, '292905', 'SAO FELIX DO CORIBE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4396, '430410', 'CAMPOS BORGES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3221, '420285', 'BRACO DO TROMBUDO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (297, '310300', 'ANTONIO DIAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5113, '430695', 'ENTRE RIOS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3435, '170700', 'DIANOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (99, '230430', 'FARIAS BRITO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2652, '330340', 'NOVA FRIBURGO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3516, '352180', 'ITAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (159, '353400', 'ONDA VERDE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3292, '320510', 'VIANA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (182, '353850', 'PIQUETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (181, '353830', 'PIQUEROBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (827, '350740', 'BORBOREMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5390, '431041', 'INHACORA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2100, '140070', 'UIRAMUTA', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2998, '293330', 'VITORIA DA CONQUISTA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5575, '314330', 'MONTES CLAROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2583, '170025', 'ABREULANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (326, '220040', 'ALTOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1138, '354210', 'RAFARD', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4337, '520082', 'AMARALINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1006, '291915', 'LAPAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1883, '171750', 'PIUM', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1581, '220020', 'AGUA BRANCA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (218, '220342', 'DOMINGOS MOURAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5252, '315910', 'SANTANA DOS MONTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3858, '420040', 'AGUA DOCE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4855, '292205', 'MULUNGU DO MORRO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3229, '420610', 'GRAO PARA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2505, '241120', 'SANTA CRUZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1235, '310870', 'BRAS PIRES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (651, '110025', 'PRESIDENTE MEDICI', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1452, '354680', 'SANTA ISABEL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2695, '350420', 'AURIFLAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1857, '291860', 'JUSSIAPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5334, '521550', 'OUVIDOR', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (982, '291490', 'ITACARE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3381, '314440', 'NATERCIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1648, '316540', 'SAPUCAI-MIRIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2149, '230500', 'GUARACIABA DO NORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1759, '316860', 'TEOFILO OTONI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1260, '312140', 'DESTERRO DE ENTRE RIOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5573, '313880', 'LUZ', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4504, '311630', 'CIPOTANEA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2491, '221110', 'UNIAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5262, '520140', 'APARECIDA DE GOIANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3037, '260310', 'CACHOEIRINHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1972, '261590', 'TUPARETAMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1459, '355040', 'SAO PEDRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5238, '353070', 'MOGI GUACU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3432, '170390', 'CASEARA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3982, '310670', 'BETIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2221, '410900', 'GUAPIRAMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (520, '241380', 'TABOLEIRO GRANDE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4883, '310010', 'ABADIA DOS DOURADOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3662, '430192', 'BARRA DO RIO AZUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2582, '160060', 'SANTANA', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2372, '251660', 'TAVARES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2255, '411440', 'MANGUEIRINHA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3663, '430210', 'BENTO GONCALVES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3945, '130115', 'CAREIRO DA VARZEA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1752, '313535', 'JAPONVAR', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4678, '240210', 'CAMPO REDONDO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1849, '291340', 'IGAPORA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (632, '290410', 'BOQUIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1100, '251640', 'CAMPO DE SANTANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4768, '431844', 'SAO JORGE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (64, '221070', 'SIMOES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3757, '432146', 'TIO HUGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2053, '290720', 'CASA NOVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4824, '290480', 'CAATIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (25, '520250', 'ARUANA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (24, '520085', 'AMERICANO DO BRASIL', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4478, '311120', 'CAMPO BELO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4532, '432340', 'VILA MARIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2294, '412015', 'PORTO BARREIRO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2600, '170510', 'CHAPADA DA NATIVIDADE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3860, '314675', 'PALMOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4780, '432067', 'SINIMBU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3304, '421910', 'VARGEAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3994, '421030', 'MAJOR VIEIRA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1290, '410580', 'COLOMBO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3801, '293100', 'TANHACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3886, '315727', 'SANTA BARBARA DO MONTE VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3902, '290740', 'CATOLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4705, '430800', 'FAXINAL DO SOTURNO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5111, '430660', 'DOM PEDRITO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1155, '354515', 'SALTINHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2102, '150260', 'COLARES', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2122, '170307', 'BARRA DO OURO', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3773, '353320', 'NOVA INDEPENDENCIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3237, '420870', 'JACINTO MACHADO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (446, '410740', 'ENEAS MARQUES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4125, '210407', 'FEIRA NOVA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4961, '510340', 'CUIABA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1268, '353980', 'POA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4582, '316400', 'SAO PEDRO DOS FERROS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5556, '520060', 'ALTO PARAISO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2902, '260260', 'BREJO DA MADRE DE DEUS', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1630, '315460', 'RIBEIRAO DAS NEVES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4718, '431055', 'ITACURUBI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3966, '310375', 'ARAPORA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4328, '510787', 'SAPEZAL', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3634, '421565', 'SANTA ROSA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1105, '270370', 'JARAMATAIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1578, '231140', 'QUIXERAMOBIM', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3492, '351740', 'GUAIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (188, '353970', 'PLATINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2216, '330610', 'VALENCA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5008, '313868', 'LUISLANDIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1515, '314880', 'PEDRA DO ANTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (761, '292780', 'SANTA CRUZ DA VITORIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (635, '290470', 'BUERAREMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2729, '355000', 'SAO LUIS DO PARAITINGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1425, '354080', 'POTIRENDABA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (426, '350440', 'AVANHANDAVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2702, '350550', 'BARRETOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4292, '352210', 'ITANHAEM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4156, '210750', 'PACO DO LUMIAR', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3058, '260700', 'INAJA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4739, '431337', 'NOVA SANTA RITA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4245, '311980', 'CORREGO DANTA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1875, '292910', 'SAO FELIPE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3990, '310840', 'BOTELHOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2406, '311030', 'CALDAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1595, '314860', 'PECANHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1564, '150803', 'TRACUATEUA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (628, '290320', 'BARREIRAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (471, '412860', 'VERE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2289, '411940', 'PIRAI DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3854, '412820', 'UNIAO DA VITORIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1358, '412440', 'SANTO ANTONIO DO SUDOESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5427, '430450', 'CANGUCU', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2412, '311310', 'CARANAIBA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1027, '292265', 'NORDESTINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5371, '430810', 'FELIZ', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (902, '420140', 'ARARANGUA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (454, '411420', 'MANDAGUARI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2379, '261070', 'PAULISTA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1338, '411900', 'PEROLA D''OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5185, '521920', 'SANTA CRUZ DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3298, '421835', 'TREVISO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3205, '411890', 'PEROLA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1865, '292260', 'NILO PECANHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4194, '210210', 'BREJO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (626, '290300', 'BARRA DO MENDES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4663, '231030', 'PARAMBU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1776, '317110', 'VERISSIMO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1272, '410070', 'ALTO PIQUIRI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3929, '110092', 'CHUPINGUAIA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4741, '431360', 'PAIM FILHO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1392, '314430', 'NANUQUE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2361, '250400', 'CAMPINA GRANDE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1370, '314020', 'MARIPA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2874, '210480', 'GRAJAU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2748, '355390', 'TARABAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (277, '260220', 'BOM JARDIM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4080, '171890', 'SANTA ROSA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2411, '311270', 'CAPITAO ENEAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1990, '150070', 'ANAJAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2254, '411435', 'MANFRINOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4185, '172130', 'TUPIRATINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4704, '430783', 'EUGENIO DE CASTRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3130, '420220', 'BENEDITO NOVO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1758, '316830', 'TAQUARACU DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4672, '240010', 'ACARI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (949, '290920', 'CORONEL JOAO SA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (929, '420445', 'CORONEL MARTINS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2374, '260410', 'CARUARU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (494, '240890', 'PARELHAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4848, '291845', 'JUCURUCU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3150, '510794', 'TABAPORA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3703, '431570', 'RIO PARDO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3544, '352730', 'LOUVEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1249, '311610', 'CHAPADA DO NORTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2291, '411970', 'PLANALTINA DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (784, '313100', 'INHAUMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5324, '521377', 'MONTIVIDIU DO NORTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3620, '421175', 'OTACILIO COSTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4271, '312400', 'ERVALIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4435, '220275', 'COLONIA DO GURGUEIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4646, '230330', 'CARIUS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5292, '520710', 'DIORAMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3499, '351860', 'GUARIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3651, '430003', 'ACEGUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2935, '291880', 'LAJE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3598, '420550', 'FRAIBURGO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2842, '521525', 'NOVO PLANALTO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4994, '313660', 'NOVA UNIAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (778, '312040', 'CRISTIANO OTONI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4361, '520980', 'HIDROLINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1056, '251000', 'NAZAREZINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (362, '220345', 'DOM INOCENCIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (899, '420075', 'ALTO BELA VISTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5263, '520145', 'APARECIDA DO RIO DOCE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4976, '510610', 'NOSSA SENHORA DO LIVRAMENTO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4351, '520551', 'COCALZINHO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3991, '420990', 'LONTRAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4431, '220225', 'CANAVIEIRA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1845, '291125', 'GAVIAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1721, '312930', 'IAPU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1010, '292000', 'MAIQUINIQUE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (633, '290450', 'BROTAS DE MACAUBAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2973, '292930', 'SAO GONCALO DOS CAMPOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1830, '290260', 'BAIXA GRANDE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1030, '292290', 'NOVA SOURE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2414, '311440', 'CARMO DO RIO CLARO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5578, '314930', 'PEDRO LEOPOLDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4491, '311420', 'CARMO DO CAJURU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5032, '316040', 'SANTO ANTONIO DO MONTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1199, '231400', 'VARZEA ALEGRE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (615, '290135', 'ANDORINHA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2950, '292580', 'QUEIMADAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5299, '520880', 'GOIANIRA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5202, '522060', 'SILVANIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5307, '521030', 'ISRAELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (865, '412500', 'SAO JOAO DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (460, '412033', 'PRADO FERREIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2311, '412270', 'SABAUDIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (887, '412810', 'UMUARAMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (886, '412790', 'TUNEIRAS DO OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2234, '411110', 'ITAMBE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2650, '330310', 'NATIVIDADE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4035, '421590', 'SAO BONIFACIO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (483, '240710', 'MACAIBA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3652, '430030', 'ALECRIM', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2946, '292510', 'POCOES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (410, '313200', 'ITACAMBIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4609, '260370', 'CANHOTINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2288, '411930', 'PINHAO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5189, '521945', 'SANTA RITA DO NOVO DESTINO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3895, '316730', 'SILVEIRANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (531, '250060', 'ALHANDRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5207, '522145', 'TROMBAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2554, '150611', 'QUATIPURU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4673, '240050', 'ALEXANDRIA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2865, '150680', 'SANTAREM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1317, '411310', 'KALORE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5422, '431290', 'NOVA BASSANO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1250, '311615', 'CHAPADA GAUCHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1557, '320501', 'SOORETAMA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4404, '430490', 'CASCA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3775, '353410', 'ORIENTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3905, '292020', 'MALHADA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4486, '311290', 'CAPUTIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2462, '270890', 'SATUBA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2982, '293080', 'SOUTO SOARES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5204, '522108', 'TERESINA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1441, '354370', 'RINCAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1141, '354260', 'REGISTRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (112, '230625', 'ITAITINGA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2384, '270170', 'CAPELA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1961, '261220', 'SALGUEIRO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2634, '330070', 'CABO FRIO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2589, '170210', 'ARAGUAINA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2668, '330575', 'TANGUA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1130, '270880', 'SAO SEBASTIAO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4138, '210545', 'JATOBA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1171, '354820', 'SANTO ANTONIO DO PINHAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3934, '110180', 'VALE DO PARAISO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4816, '290090', 'ALMADINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (164, '353530', 'PALMITAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5082, '316790', 'TABULEIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4089, '172100', 'PALMAS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4200, '210380', 'DOM PEDRO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3619, '421145', 'NOVA ITABERABA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1656, '330290', 'MIGUEL PEREIRA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2032, '211210', 'TIMBIRAS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3011, '410754', 'ESPIGAO ALTO DO IGUACU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1154, '354510', 'SALMOURAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1657, '330330', 'NITEROI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3522, '352270', 'ITAPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1678, '352300', 'ITAPURA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2770, '410045', 'ALTAMIRA DO PARANA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2617, '171215', 'LAVANDEIRA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (414, '317050', 'URUCANIA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3481, '351530', 'ESTRELA DO NORTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (429, '351515', 'ENGENHEIRO COELHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4408, '430520', 'CERRO LARGO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2050, '280200', 'DIVINA PASTORA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (828, '350775', 'BREJO ALEGRE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2923, '290970', 'CRISTOPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1538, '316430', 'SAO ROQUE DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (158, '353390', 'OLIMPIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4632, '221038', 'SAO MIGUEL DA BAIXA GRANDE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2283, '411860', 'PAULA FREITAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1246, '311500', 'CASCALHO RICO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4669, '231310', 'TABULEIRO DO NORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (979, '291440', 'IRAQUARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2478, '210800', 'PASTOS BONS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4148, '210660', 'MATOES', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (766, '310110', 'AIMORES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1253, '311770', 'CONCEICAO DO RIO VERDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (963, '291150', 'GONGOGI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2089, '314720', 'PARAGUACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3896, '510480', 'JACIARA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3875, '315250', 'POUSO ALEGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1626, '315410', 'RECREIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (565, '250600', 'ESPERANCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1049, '250900', 'MANAIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1508, '314390', 'MURIAE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1378, '314200', 'MIRABELA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4466, '310910', 'BUENO BRANDAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1384, '314290', 'MONTE AZUL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2420, '311710', 'CONCEICAO DA APARECIDA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5042, '316170', 'SAO GONCALO DO ABAETE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1343, '412030', 'PORTO VITORIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3569, '352560', 'JOAO RAMALHO', 30);
-- Seed data for table base_municipio (Brazilian municipalities).
-- Columns: id (numeric primary key), codigo (6-digit municipality code as a
-- string — presumably the IBGE code, since its first two digits group rows by
-- state; TODO confirm), nome (municipality name, uppercase and unaccented),
-- estado_id (numeric FK to the states table).
-- NOTE(review): rows are not ordered by id or codigo — this chunk appears to be
-- a machine-generated dump, so the ordering is not significant.
-- Apostrophes in names are escaped SQL-style as doubled quotes (e.g. D''OESTE).
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5007, '313860', 'LIMA DUARTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4657, '230763', 'MADALENA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4793, '521870', 'RIANAPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (437, '354500', 'SALESOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4015, '421300', 'PINHEIRO PRETO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5053, '316310', 'SAO JOSE DA VARGINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1609, '315090', 'PIRANGUCU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4114, '210255', 'CAMPESTRE DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1312, '411150', 'IVAIPORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5418, '431261', 'MUITOS CAPOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4244, '311970', 'CORONEL XAVIER CHAVES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2094, '110037', 'ALTO ALEGRE DOS PARECIS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (451, '411230', 'JAPIRA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2111, '150616', 'RIO MARIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3616, '421070', 'MATOS COSTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1939, '260430', 'CEDRO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4574, '316080', 'SAO BENTO ABADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3881, '315520', 'RIO ESPERA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2542, '150445', 'MEDICILANDIA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2519, '260630', 'GRANITO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1360, '412520', 'SAO JORGE D''OESTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4394, '430380', 'CAMPINAS DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1180, '231120', 'POTENGI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (544, '250260', 'IGARACY', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (924, '420417', 'CERRO NEGRO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1615, '315230', 'PORTO FIRME', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (603, '280660', 'SANTO AMARO DAS BROTAS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4884, '310050', 'ACUCENA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4044, '421715', 'SAO MIGUEL DA BOA VISTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (797, '320210', 'ECOPORANGA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3411, '150563', 'PICARRA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1018, '292120', 'MIGUEL CALMON', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4849, '291870', 'LAFAIETE COUTINHO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1970, '261530', 'TIMBAUBA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (667, '110175', 'VALE DO ANARI', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2529, '280120', 'CANINDE DE SAO FRANCISCO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3135, '421850', 'TREZE TILIAS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (346, '220220', 'CAMPO MAIOR', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (138, '353030', 'MIRASSOL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1568, '210275', 'CAPINZAL DO NORTE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2501, '240370', 'FELIPE GUERRA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5411, '431217', 'MATO QUEIMADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1810, '320410', 'PINHEIROS', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (624, '290265', 'BANZAE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3487, '351600', 'FLORIDA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1694, '420770', 'IPUMIRIM', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5387, '431000', 'IBIRUBA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1643, '315710', 'SALTO DA DIVISA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4391, '430350', 'CAMAQUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3317, '430080', 'ANTONIO PRADO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2199, '320515', 'VILA PAVAO', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4505, '311640', 'CLARAVAL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5572, '313753', 'LAGOA GRANDE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1696, '420800', 'ITA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2534, '292720', 'RUY BARBOSA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4786, '510810', 'TESOURO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2125, '171500', 'NOVA ROSALANDIA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1262, '312260', 'DOM JOAQUIM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1453, '354740', 'SANTA RITA D''OESTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1848, '291260', 'IBIQUERA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5258, '520055', 'ALTO HORIZONTE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2113, '150650', 'SANTA ISABEL DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2062, '320370', 'MUNIZ FREIRE', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1842, '290930', 'CORRENTINA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (305, '211110', 'SAO JOAO DOS PATOS', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4879, '293270', 'URUCUCA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5347, '521850', 'QUIRINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1617, '315260', 'POUSO ALTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1402, '314560', 'OLIVEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1482, '312540', 'FELICIO DOS SANTOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3802, '310350', 'ARAGUARI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (787, '313350', 'ITAPECERICA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3374, '314180', 'MINAS NOVAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (402, '310820', 'BONFINOPOLIS DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3864, '314870', 'PEDRA AZUL', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (63, '221063', 'SEBASTIAO LEAL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4627, '220940', 'SANTO ANTONIO DE LISBOA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5099, '150658', 'SANTA MARIA DAS BARREIRAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2642, '330190', 'ITABORAI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (842, '351390', 'DIVINOLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2705, '350610', 'BEBEDOURO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (805, '330360', 'PARACAMBI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4863, '292480', 'PIRITIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4926, '500480', 'JAPORA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4455, '220680', 'NOSSA SENHORA DOS REMEDIOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2673, '350030', 'AGUAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2636, '330110', 'CANTAGALO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1853, '291535', 'ITAGUACU DA BAHIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3758, '432147', 'TIRADENTES DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1103, '270350', 'JACUIPE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2041, '231230', 'SAO BENEDITO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2433, '312270', 'DOM SILVERIO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2556, '150620', 'SALINOPOLIS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1728, '313060', 'INCONFIDENTES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3928, '110060', 'CACAULANDIA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (768, '310510', 'BAMBUI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (293, '293310', 'VARZEA DO POCO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2663, '330510', 'SAO JOAO DE MERITI', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3838, '351150', 'CERQUILHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1906, '251020', 'NOVA OLINDA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3271, '430087', 'ARARICA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (361, '220340', 'DOM EXPEDITO LOPES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5356, '430655', 'DOM PEDRO DE ALCANTARA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1574, '220670', 'NAZARE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1213, '240280', 'CORONEL EZEQUIEL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1084, '251398', 'SAO FRANCISCO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4061, '241390', 'TAIPU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2423, '311850', 'CONSOLACAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4964, '510370', 'FELIZ NATAL', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3045, '260440', 'CHA DE ALEGRIA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3198, '411350', 'LOANDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2853, '110030', 'VILHENA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2836, '270300', 'IBATEGUARA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3534, '352540', 'JERIQUARA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (481, '240640', 'LAGOA DE VELHOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (252, '241260', 'SAO PAULO DO POTENGI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2042, '240270', 'CERRO CORA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3965, '310360', 'ARANTINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4055, '241180', 'SAO FERNANDO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3543, '352725', 'LOURDES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2623, '171515', 'NOVO ALEGRE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (783, '312980', 'IBIRITE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1119, '270644', 'PARIPUEIRA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1256, '311995', 'CORREGO FUNDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (658, '110080', 'CANDEIAS DO JAMARI', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3344, '522119', 'TEREZOPOLIS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (160, '353440', 'OSASCO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4195, '210220', 'BURITI', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (694, '130240', 'LABREA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4707, '430843', 'FORQUETINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5056, '316360', 'SAO JOSE DO MANTIMENTO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (65, '221093', 'SUSSUAPARA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1324, '411535', 'MARIPA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1151, '354430', 'ROSEIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1046, '250850', 'LIVRAMENTO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (199, '210193', 'BERNARDO DO MEARIM', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (8, '420425', 'COCAL DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1169, '354765', 'SANTA SALETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (503, '241070', 'RIACHO DA CRUZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (493, '240870', 'PARAU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2138, '220385', 'FLORESTA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (23, '520015', 'ADELANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2999, '293340', 'WAGNER', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (377, '220535', 'JOAO COSTA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (285, '261270', 'SANTA MARIA DO CAMBUCA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5588, '130090', 'CANUTAMA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4686, '240490', 'ITAU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3566, '352420', 'JABORANDI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3792, '210370', 'CURURUPU', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3615, '421055', 'MAREMA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4385, '521910', 'SANTA BARBARA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1828, '290160', 'ANTAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5569, '150550', 'PARAGOMINAS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2712, '350730', 'BORACEIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3070, '260900', 'MACAPARANA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (154, '353310', 'NOVA GUATAPORANGA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2856, '150050', 'ALMEIRIM', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (848, '351570', 'FERRAZ DE VASCONCELOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2455, '270590', 'OLHO D''AGUA GRANDE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (268, '251220', 'PRATA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3556, '352980', 'MINEIROS DO TIETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (637, '290485', 'CABACEIRAS DO PARAGUACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4069, '250140', 'BAIA DA TRAICAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5200, '522040', 'SAO SIMAO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2851, '110010', 'GUAJARA-MIRIM', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5183, '521880', 'RIO VERDE', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (505, '241090', 'RIACHUELO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4935, '500710', 'RIBAS DO RIO PARDO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4062, '241415', 'TENENTE LAURENTINO CRUZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1902, '250915', 'MARIZOPOLIS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4852, '292030', 'MALHADA DE PEDRAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (223, '220635', 'MILTON BRANDAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1644, '315725', 'SANTA BARBARA DO LESTE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5354, '430640', 'DOIS IRMAOS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (243, '240350', 'ESPIRITO SANTO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (236, '230990', 'PACUJA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (264, '250905', 'MARCACAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4607, '251065', 'PARARI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5581, '315470', 'RIBEIRAO VERMELHO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3390, '130410', 'TAPAUA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4921, '500400', 'GLORIA DE DOURADOS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5495, '520800', 'FORMOSA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3035, '260270', 'BUENOS AIRES', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1057, '251010', 'NOVA FLORESTA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3122, '410220', 'ATALAIA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2580, '160027', 'LARANJAL DO JARI', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4716, '431036', 'IMIGRANTE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2510, '250540', 'DESTERRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (600, '280610', 'ROSARIO DO CATETE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4440, '220380', 'FLORES DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (51, '220955', 'SAO BRAZ DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3782, '353710', 'PEDREIRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4314, '353510', 'PALMARES PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4029, '421535', 'SALTINHO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2970, '292900', 'SAO FELIX', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4643, '230195', 'BARREIRA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (337, '220177', 'BOA HORA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1833, '290400', 'BONINAL', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (681, '130040', 'BARCELOS', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2296, '412040', 'PRESIDENTE CASTELO BRANCO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5115, '430770', 'ESTEIO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (468, '412670', 'TAMBOARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3643, '421795', 'TIGRINHOS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (792, '317057', 'VARGEM ALEGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (506, '241110', 'RUY BARBOSA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4677, '240165', 'BODO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4057, '241270', 'SAO PEDRO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4603, '240630', 'LAGOA DE PEDRAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4612, '270390', 'JUNDIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (185, '353910', 'PIRAPORA DO BOM JESUS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (226, '220930', 'SANTA LUZ', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4064, '241480', 'VERA CRUZ', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3039, '260345', 'CAMARAGIBE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1404, '314585', 'ORATORIOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (863, '412450', 'SANTO INACIO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1044, '250790', 'JURIPIRANGA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4230, '211190', 'SUCUPIRA DO NORTE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3749, '432057', 'SETE DE SETEMBRO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4341, '520280', 'AVELINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2458, '270700', 'PINDOBA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2046, '260840', 'JUREMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5612, '220672', 'NAZARIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (61, '221050', 'SAO PEDRO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4631, '221010', 'SAO JOSE DO PEIXE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5331, '521500', 'NOVA VENEZA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (129, '230930', 'NOVA RUSSAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4557, '510170', 'BARRA DO BUGRES', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4674, '240070', 'ALTO DO RODRIGUES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2392, '270650', 'PASSO DE CAMARAGIBE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (141, '353100', 'MONCOES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2476, '210465', 'GOVERNADOR NEWTON BELLO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1652, '351519', 'ESPIRITO SANTO DO TURVO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1908, '251100', 'PEDRA BRANCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2490, '221020', 'SAO JOSE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4141, '210565', 'JUNCO DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5036, '316095', 'SAO DOMINGOS DAS DORES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2457, '270660', 'PAULO JACINTO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4081, '172000', 'SANTA TEREZINHA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4566, '510460', 'ITIQUIRA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1113, '270550', 'MURICI', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3213, '412535', 'SAO JORGE DO PATROCINIO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2364, '250690', 'ITABAIANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (557, '250485', 'COXIXOLA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5114, '430755', 'ESTACAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (821, '350510', 'BARBOSA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (395, '220710', 'OLHO D''AGUA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2924, '291030', 'ELISIO MEDRADO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (435, '353715', 'PEDRINHAS PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3923, '353270', 'NIPOA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2550, '150560', 'PEIXE-BOI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1673, '351650', 'GABRIEL MONTEIRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4086, '172080', 'SITIO NOVO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1025, '292220', 'MUNIZ FERREIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5521, '355150', 'SERRANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5613, '431454', 'PINTO BANDEIRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3633, '421540', 'SALTO VELOSO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (492, '240860', 'PARANA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5246, '293280', 'UTINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4878, '293230', 'UBATA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (467, '412625', 'SARANDI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (537, '250153', 'BARAUNA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2357, '241500', 'VILA FLOR', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5128, '431179', 'MARATA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (800, '320420', 'PIUMA', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3284, '430558', 'COLINAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (26, '520355', 'BONFINOPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4474, '311020', 'CAJURI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3521, '352250', 'ITAPEVI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3585, '420257', 'BOM JESUS DO OESTE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (396, '220735', 'PAJEU DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1904, '250960', 'MONTE HOREBE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (572, '270940', 'VICOSA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (499, '241010', 'POCO BRANCO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2098, '140002', 'AMAJARI', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1223, '240460', 'IELMO MARINHO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (388, '220610', 'MATIAS OLIMPIO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4637, '221150', 'VERA MENDES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1986, '130290', 'MAUES', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3391, '130430', 'URUCARA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2895, '250320', 'CABEDELO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1045, '250810', 'LAGOA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2698, '350480', 'BALSAMO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3484, '351565', 'FERNAO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1818, '280370', 'MACAMBIRA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (864, '412460', 'SAO CARLOS DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4513, '432225', 'TUPANDI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4398, '430461', 'CANUDOS DO VALE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2819, '261650', 'XEXEU', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (818, '350370', 'ARIRANHA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1859, '291920', 'LAURO DE FREITAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (260, '250527', 'CURRAL DE CIMA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (518, '241360', 'SEVERIANO MELO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3716, '431725', 'SANTA TEREZA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (71, '230010', 'ABAIARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (94, '230395', 'CHOROZINHO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (747, '290660', 'CANDIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (542, '250230', 'BOM SUCESSO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (375, '220525', 'JARDIM DO MULATO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1872, '292730', 'SALINAS DA MARGARIDA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3833, '350930', 'CAJOBI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5375, '430860', 'GARIBALDI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2830, '270200', 'COITE DO NOIA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2826, '270120', 'CACIMBINHAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (438, '355280', 'TABOAO DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1891, '250530', 'CURRAL VELHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3930, '110120', 'MINISTRO ANDREAZZA', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2387, '270320', 'IGREJA NOVA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (231, '230390', 'CHAVAL', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3750, '432065', 'SILVEIRA MARTINS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3632, '421530', 'SALETE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (136, '231060', 'PENAFORTE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (296, '310205', 'ALTO CAPARAO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5456, '500580', 'NIOAQUE', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4602, '240185', 'CAICARA DO NORTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (521, '241410', 'TENENTE ANANIAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (345, '220217', 'CAMPO LARGO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5380, '430912', 'GRAMADO DOS LOUREIROS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2829, '270190', 'CHA PRETA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3820, '350360', 'AREIOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3141, '431335', 'NOVA ROMA DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5112, '430670', 'DONA FRANCISCA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4463, '220880', 'REGENERACAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1282, '410337', 'BRASILANDIA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1305, '410975', 'IBEMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3692, '431475', 'POCO DAS ANTAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4837, '291200', 'IBIASSUCE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3709, '431647', 'SALVADOR DAS MISSOES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5048, '316257', 'SAO JOAO DO MANTENINHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3399, '150160', 'BONITO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1422, '354010', 'PONGAI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4303, '352885', 'MARAPOAMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1573, '220470', 'INHUMA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (560, '250520', 'CUITEGI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4703, '430760', 'ESTANCIA VELHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2986, '293130', 'TAPIRAMUTA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (55, '220997', 'SAO JOAO DO ARRAIAL', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2913, '290110', 'AMELIA RODRIGUES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1846, '291210', 'IBICARAI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5492, '500540', 'MARACAJU', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4917, '500320', 'CORUMBA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4564, '510385', 'GAUCHA DO NORTE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2108, '150510', 'OBIDOS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (561, '250535', 'DAMIAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2677, '350080', 'ALFREDO MARCONDES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (515, '241320', 'SENADOR GEORGINO AVELINO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (653, '110032', 'SAO MIGUEL DO GUAPORE', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2577, '160020', 'CALCOENE', 8);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (256, '250205', 'BERNARDINO BATISTA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5419, '431262', 'MULITERNO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (951, '290950', 'CRAVOLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2186, '317115', 'VERMELHO NOVO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3003, '310040', 'ACAIACA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (101, '230450', 'FRECHEIRINHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5228, '220095', 'AROEIRAS DO ITAIM', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4600, '230180', 'BAIXIO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4696, '240850', 'OURO BRANCO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4063, '241470', 'VARZEA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (989, '291670', 'ITAQUARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1527, '315733', 'SANTA CRUZ DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1604, '315015', 'PIEDADE DE CARATINGA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3101, '261540', 'TORITAMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2208, '330280', 'MENDES', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4891, '310330', 'ARACITABA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4901, '432330', 'VILA FLORES', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4684, '240440', 'GROSSOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2044, '260330', 'CALCADO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2241, '411240', 'JAPURA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1225, '240485', 'ITAJA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5445, '432285', 'VESPASIANO CORREA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1897, '250760', 'JUAREZ TAVORA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4598, '220465', 'ILHA GRANDE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2896, '250480', 'COREMAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (740, '150360', 'ITAITUBA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3825, '350580', 'BASTOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (849, '351630', 'FRANCISCO MORATO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5433, '430535', 'CHARQUEADAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4697, '240895', 'RIO DO FOGO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4334, '520005', 'ABADIA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3464, '351240', 'CORDEIROPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (578, '280150', 'CARMOPOLIS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2331, '280520', 'PINHAO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4687, '240540', 'JAPI', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1354, '412360', 'SANTA INES', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1852, '291510', 'ITAGI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2546, '150530', 'ORIXIMINA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2512, '250939', 'MATUREIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1653, '330093', 'CARAPEBUS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2126, '171790', 'PONTE ALTA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (906, '420195', 'BALNEARIO ARROIO DO SILVA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3021, '251740', 'ZABELE', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2243, '411270', 'JATAIZINHO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2649, '330285', 'MESQUITA', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (54, '220995', 'SAO JOAO DA VARJOTA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1959, '261150', 'QUIPAPA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5332, '521523', 'NOVO GAMA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2142, '220985', 'SAO JOAO DA CANABRAVA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2524, '261440', 'SOLIDAO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5257, '520025', 'AGUAS LINDAS DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5388, '431030', 'ILOPOLIS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4276, '312520', 'FAMA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (45, '220900', 'RIO GRANDE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2326, '260830', 'JUPI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (117, '230725', 'JIJOCA DE JERICOACOARA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5548, '430460', 'CANOAS', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2920, '290700', 'CARDEAL DA SILVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (709, '140020', 'CARACARAI', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3765, '353000', 'MIRA ESTRELA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4725, '431162', 'LINDOLFO COLLOR', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2474, '210173', 'BELAGUA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3207, '412020', 'PORTO RICO', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (107, '230535', 'ICAPUI', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4578, '316255', 'SAO JOAO DO MANHUACU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3347, '522185', 'VALPARAISO DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1217, '240325', 'PARNAMIRIM', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (692, '130195', 'ITAMARATI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4427, '220192', 'BONFIM DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (343, '220208', 'CAJUEIRO DA PRAIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5589, '130210', 'JAPURA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3055, '260610', 'GLORIA DO GOITA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2757, '355520', 'TURIUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4066, '250057', 'ALGODAO DE JANDAIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2446, '270135', 'CAMPESTRE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4527, '432235', 'UNIAO DA SERRA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1923, '251590', 'SERRARIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2472, '172020', 'SAO MIGUEL DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3264, '421820', 'TIMBO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4556, '510140', 'ARIPUANA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (594, '280470', 'NOSSA SENHORA DE LOURDES', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5133, '431306', 'NOVA HARTZ', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2745, '355310', 'TAIACU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2974, '292960', 'SAPEACU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3869, '315053', 'PINGO-D''AGUA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (359, '220327', 'CURRAL NOVO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (344, '220209', 'CALDEIRAO GRANDE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4830, '290800', 'COARACI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4989, '150276', 'CUMARU DO NORTE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1803, '320332', 'MARATAIZES', 12);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3059, '260730', 'IPUBI', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4434, '220271', 'COCAL DE TELHA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1001, '291850', 'JUSSARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4346, '520440', 'CAIAPONIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5337, '521580', 'PALMELO', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5026, '315935', 'SANTA RITA DE MINAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1031, '292305', 'NOVO TRIUNFO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3126, '411710', 'NOVA LONDRINA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (480, '240615', 'JUNDIA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5102, '171245', 'LUZINOPOLIS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1197, '231380', 'URUBURETAMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2944, '292467', 'PIRAI DO NORTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1315, '411260', 'JARDIM OLINDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2456, '270620', 'PALESTINA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5095, '130360', 'SANTA ISABEL DO RIO NEGRO', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2140, '220650', 'MONSENHOR HIPOLITO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1153, '354470', 'SAGRES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2736, '355130', 'SEBASTIANOPOLIS DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3592, '420419', 'CHAPADAO DO LAGEADO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (387, '220605', 'MASSAPE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2330, '270870', 'SAO MIGUEL DOS MILAGRES', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (216, '220205', 'CABECEIRAS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2444, '270080', 'BELEM', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1043, '250780', 'JUNCO DO SERIDO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (604, '280690', 'SAO FRANCISCO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2257, '411470', 'MARIA HELENA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1473, '355530', 'TURMALINA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2930, '291460', 'IRECE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2759, '355590', 'URU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2451, '270440', 'MAJOR ISIDORO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2997, '293320', 'VERA CRUZ', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (549, '250350', 'CACIMBA DE DENTRO', 19);
-- Normalized 'Balneário Rincão' to uppercase ASCII without accents, matching the
-- convention used by every other nome value in this dump (e.g. 'BALNEARIO ARROIO
-- DO SILVA'); the mixed-case accented form broke case/accent-sensitive lookups.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5610, '422000', 'BALNEARIO RINCAO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3474, '351420', 'DOLCINOPOLIS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1579, '240160', 'BENTO FERNANDES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2001, '171830', 'PRAIA NORTE', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3844, '351495', 'EMBAUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (59, '221039', 'SAO MIGUEL DO FIDALGO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3233, '420757', 'IOMERE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (367, '220420', 'FRANCISCO SANTOS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4680, '240290', 'CORONEL JOAO PESSOA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4822, '290395', 'BOM JESUS DA SERRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4712, '430955', 'HARMONIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3025, '260060', 'ALAGOINHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1098, '251597', 'SOBRADO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1926, '251675', 'TENORIO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (502, '241040', 'PUREZA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3948, '130230', 'JUTAI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4605, '250355', 'CACIMBAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3529, '352450', 'JACI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1108, '270460', 'MARAVILHA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (20, '510770', 'ROSARIO OESTE', 17);
-- Normalized 'Pescaria Brava' to uppercase ASCII, matching the convention used by
-- every other nome value in this dump; the mixed-case form broke case-sensitive
-- lookups against the rest of the table.
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5609, '421265', 'PESCARIA BRAVA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1202, '240060', 'ALMINO AFONSO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (738, '150340', 'INHANGAPI', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4135, '210520', 'IGARAPE GRANDE', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (523, '241445', 'TRIUNFO POTIGUAR', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1064, '251180', 'PIRPIRITUBA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4905, '432380', 'XANGRI-LA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4031, '421555', 'SANTA HELENA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3459, '351120', 'CATIGUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4952, '510250', 'CACERES', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (990, '291685', 'ITATIM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (856, '351907', 'HORTOLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2852, '110020', 'PORTO VELHO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2735, '355120', 'SARUTAIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (558, '250500', 'CUBATI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (247, '240760', 'MESSIAS TARGINO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (489, '240790', 'MONTE DAS GAMELEIRAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3471, '351380', 'DIADEMA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (228, '221090', 'SOCORRO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1844, '291020', 'DOM MACEDO COSTA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (616, '290150', 'ANGUERA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1198, '231395', 'VARJOTA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (272, '251500', 'SAO MIGUEL DE TAIPU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4047, '240930', 'PATU', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5571, '313620', 'JOAO MONLEVADE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3193, '410920', 'GUARACI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3102, '261550', 'TRACUNHAEM', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2995, '293315', 'VARZEA NOVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2431, '312235', 'DIVISA ALEGRE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2351, '240680', 'LAJES PINTADAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (459, '411915', 'PINHAIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3166, '355465', 'TORRE DE PEDRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2956, '292650', 'RIBEIRA DO AMPARO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1110, '270490', 'MAR VERMELHO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (325, '220027', 'ALEGRETE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1336, '411830', 'PARANAPOEMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3392, '140005', 'ALTO ALEGRE', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2356, '241490', 'VICOSA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2389, '270410', 'LAGOA DA CANOA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1984, '130120', 'COARI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (540, '250210', 'BOA VENTURA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2514, '251396', 'SAO DOMINGOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (270, '251360', 'SANTANA DOS GARROTES', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5434, '430543', 'CHUI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (420, '330015', 'APERIBE', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1053, '250937', 'MATO GROSSO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4367, '521205', 'JESUPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3737, '431937', 'SAO PEDRO DO BUTIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4876, '293170', 'TERRA NOVA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4067, '250077', 'APARECIDA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5511, '354690', 'SANTA LUCIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3089, '261280', 'SANTA TEREZINHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2791, '410340', 'CAFEARA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1144, '354310', 'RIBEIRAO CORRENTE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (146, '353205', 'MOTUCA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2892, '240940', 'PAU DOS FERROS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5525, '355320', 'TAIUVA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4426, '220180', 'BOCAINA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5152, '431971', 'SAO VALENTIM DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5467, '510630', 'PARANATINGA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2385, '270210', 'COLONIA LEOPOLDINA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5491, '500270', 'CAMPO GRANDE', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (964, '291160', 'GOVERNADOR MANGABEIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2235, '411130', 'ITAUNA DO SUL', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (699, '130350', 'PAUINI', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2906, '270140', 'CAMPO ALEGRE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3087, '261247', 'SANTA CRUZ DA BAIXA VERDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1222, '240430', 'GOVERNADOR DIX-SEPT ROSADO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3086, '261230', 'SALOA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4430, '220211', 'CAMPO ALEGRE DO FIDALGO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (349, '220253', 'CARAUBAS DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2370, '251394', 'SAO DOMINGOS DO CARIRI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (642, '290610', 'CANAPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3406, '150375', 'JACAREACANGA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1109, '270480', 'MARIBONDO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5116, '430807', 'FAZENDA VILANOVA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4840, '291420', 'IRAJUBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1769, '317040', 'UNAI', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2507, '250010', 'AGUA BRANCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1823, '280650', 'SANTA ROSA DE LIMA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (57, '221030', 'SAO JULIAO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4162, '210845', 'PERITORO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2395, '270910', 'TAQUARANA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4629, '220980', 'SAO GONCALO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4668, '231260', 'SAO LUIS DO CURU', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3559, '352115', 'IPIGUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1705, '420950', 'LAURENTINO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2888, '231085', 'PINDORETAMA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1597, '314900', 'PEDRA DOURADA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4054, '241150', 'SANTO ANTONIO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (495, '240910', 'PASSA E FICA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1953, '260880', 'LAJEDO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1677, '352000', 'IGARACU DO TIETE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3091, '261340', 'SAO JOSE DA COROA GRANDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1227, '293140', 'TEODORO SAMPAIO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4580, '316295', 'SAO JOSE DA LAPA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2861, '150420', 'MARABA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2447, '270180', 'CARNEIROS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3409, '150503', 'NOVO PROGRESSO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4050, '241025', 'PORTO DO MANGUE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4737, '431308', 'NOVA PADUA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1172, '354830', 'SANTO EXPEDITO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (638, '290530', 'CAFARNAUM', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4231, '311720', 'CONCEICAO DAS PEDRAS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3339, '430310', 'CACHOEIRINHA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4095, '210055', 'AMAPA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2165, '312850', 'GUARARA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (37, '220793', 'PEDRO LAURENTINO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (83, '230190', 'BARBALHA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3906, '292465', 'PINTADAS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5248, '290500', 'CACULE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (524, '241450', 'UMARIZAL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2360, '250190', 'BELEM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2268, '411650', 'NOVA ALIANCA DO IVAI', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (602, '280640', 'SANTANA DO SAO FRANCISCO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1899, '250840', 'LASTRO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4041, '421660', 'SAO JOSE', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2333, '290360', 'BIRITINGA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (684, '130080', 'BORBA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3468, '351330', 'CRUZALIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (697, '130300', 'NHAMUNDA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (985, '291570', 'ITAMARI', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1401, '314550', 'OLIMPIO NORONHA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (327, '220045', 'ALVORADA DO GURGUEIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (629, '290330', 'BARRO PRETO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (115, '230700', 'JAGUARUANA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5542, '412720', 'TERRA BOA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1501, '313867', 'LUISBURGO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (562, '250550', 'VISTA SERRANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1216, '240320', 'DOUTOR SEVERIANO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5611, '150475', 'MOJUI DOS CAMPOS', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (193, '354075', 'POTIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (439, '355690', 'VISTA ALEGRE DO ALTO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (813, '350170', 'AMERICO BRASILIENSE', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3686, '431413', 'PAULO BENTO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1445, '354450', 'RUBINEIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (567, '250650', 'GURJAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (130, '230960', 'PACAJUS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2825, '270110', 'BRANQUINHA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3405, '150345', 'IPIXUNA DO PARA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2945, '292490', 'PLANALTINO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5165, '510650', 'POCONE', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2071, '411750', 'PAICANDU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (324, '220025', 'ALAGOINHA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5161, '432218', 'TUPANCI DO SUL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4157, '210770', 'PARAIBANO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2912, '290040', 'AGUA FRIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4980, '510622', 'NOVA MUTUM', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1208, '240190', 'CAICARA DO RIO DO VENTO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (905, '420180', 'ATALANTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4186, '210010', 'AFONSO CUNHA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (554, '250420', 'CATINGUEIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3835, '351015', 'CANITAR', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1065, '251190', 'PITIMBU', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (335, '220155', 'BELA VISTA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (734, '150300', 'FARO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1101, '251670', 'TEIXEIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1396, '314467', 'NOVA BELEM', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1371, '314040', 'MARMELOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4495, '311470', 'CARVALHOPOLIS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1901, '250880', 'MALTA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4076, '250403', 'CAPIM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1120, '270680', 'PIACABUCU', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1148, '354410', 'RIO GRANDE DA SERRA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4733, '431260', 'MUCUM', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1890, '250490', 'CRUZ DO ESPIRITO SANTO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2237, '411160', 'IVATUBA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5127, '431142', 'LAJEADO DO BUGRE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1087, '251445', 'SAO JOSE DOS RAMOS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2448, '270235', 'CRAIBAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1937, '260360', 'CAMUTANGA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (962, '291120', 'GANDU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1411, '314700', 'PARACATU', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3136, '430163', 'BALNEARIO PINHAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2891, '240340', 'EQUADOR', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (50, '220950', 'SANTO INACIO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (36, '220785', 'PAVUSSU', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3327, '430195', 'BARRA FUNDA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4045, '421725', 'SAO PEDRO DE ALCANTARA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1111, '270520', 'MESSIAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (353, '220273', 'COIVARAS', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (497, '240980', 'PEDRO VELHO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4735, '431280', 'NOVA ARACA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (248, '240880', 'PARAZINHO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2994, '293300', 'VALENTE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1987, '130380', 'SAO GABRIEL DA CACHOEIRA', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3582, '420205', 'BALNEARIO BARRA DO SUL', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1226, '240500', 'JACANA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4664, '231110', 'PORTEIRAS', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (958, '291075', 'FATIMA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (588, '280360', 'LARANJEIRAS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4310, '353286', 'NOVA CASTILHO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4229, '211174', 'SENADOR ALEXANDRE COSTA', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3017, '410810', 'FLORIDA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4810, '280570', 'PROPRIA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4456, '220730', 'PAES LANDIM', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1174, '354880', 'SAO CAETANO DO SUL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3558, '352080', 'INUBIA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (336, '220157', 'BELEM DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2857, '150085', 'ANAPU', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2593, '170290', 'AXIXA DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1873, '292790', 'SANTA INES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1638, '315630', 'RODEIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2144, '221160', 'VILA NOVA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4529, '432254', 'VALE REAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2547, '150543', 'OURILANDIA DO NORTE', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4694, '240770', 'MONTANHAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2229, '411030', 'INAJA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4639, '230060', 'ALTANEIRA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1941, '260500', 'CUPIRA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1091, '251470', 'SAO JOSE DO SABUGI', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4093, '210043', 'ALTO ALEGRE DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1212, '240250', 'CARNAUBAIS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2201, '330023', 'ARMACAO DOS BUZIOS', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4395, '430390', 'CAMPO BOM', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4934, '500660', 'PONTA PORA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (552, '250380', 'CALDAS BRANDAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4828, '290687', 'CAPIM GROSSO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2654, '330395', 'PINHEIRAL', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1326, '411570', 'MATINHOS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (266, '251030', 'NOVA PALMEIRA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1821, '280510', 'PEDRINHAS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2051, '280600', 'RIBEIROPOLIS', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (608, '280760', 'UMBAUBA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (308, '211140', 'SAO LUIS GONZAGA DO MARANHAO', 14);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3010, '410752', 'ESPERANCA NOVA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4949, '510160', 'BARAO DE MELGACO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (408, '313010', 'IGARAPE', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1679, '352310', 'ITAQUAQUECETUBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2773, '410090', 'AMAPORA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3062, '260775', 'ITAPISSUMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4959, '510330', 'COMODORO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (717, '150060', 'ALTAMIRA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1303, '410910', 'GUAPOREMA', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1052, '250930', 'MATARACA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1175, '354900', 'SAO FRANCISCO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2141, '220850', 'PORTO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (663, '110143', 'NOVA UNIAO', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1038, '292405', 'PE DE SERRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (927, '420435', 'CORDILHEIRA ALTA', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (711, '140047', 'RORAINOPOLIS', 26);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5288, '520620', 'CRISTALINA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (29, '520680', 'DAMOLANDIA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (391, '220667', 'MORRO DO CHAPEU DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2445, '270100', 'BOCA DA MATA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4616, '291450', 'IRARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1475, '355570', 'UNIAO PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2527, '270530', 'MINADOR DO NEGRAO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1166, '354710', 'SANTA MERCEDES', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5125, '431080', 'IVOTI', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1066, '251207', 'POCO DE JOSE DE MOURA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2375, '260450', 'CHA GRANDE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (504, '241080', 'RIACHO DE SANTANA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3639, '421700', 'SAO LUDGERO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4585, '316553', 'SARZEDO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (242, '240170', 'BOM JESUS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1131, '270895', 'SENADOR RUI PALMEIRA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4048, '240950', 'PEDRA GRANDE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2036, '220690', 'NOVO ORIENTE DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1132, '270900', 'TANQUE D''ARCA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1206, '240145', 'BARAUNA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4693, '240730', 'MARCELINO VIEIRA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4904, '432377', 'WESTFALIA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1116, '270600', 'OLIVENCA', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (575, '280070', 'BREJO GRANDE', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4462, '220860', 'PRATA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3417, '150780', 'SENADOR JOSE PORFIRIO', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2049, '270510', 'MATRIZ DE CAMARAGIBE', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3046, '260460', 'CONDADO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (261, '250620', 'FREI MARTINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4387, '521950', 'SANTA ROSA DE GOIAS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5093, '110155', 'TEIXEIROPOLIS', 25);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3073, '260960', 'OLINDA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1917, '251392', 'SAO BENTINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4850, '291910', 'LAMARAO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3689, '431445', 'PINHAL', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1907, '251060', 'OURO VELHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (498, '240990', 'PENDENCIAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1469, '355365', 'TAQUARAL', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (861, '412405', 'SANTA TEREZINHA DE ITAIPU', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (128, '230920', 'NOVA OLINDA', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (844, '351460', 'DUMONT', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (220, '220490', 'ISAIAS COELHO', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4981, '510624', 'NOVA UBIRATA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2359, '250180', 'BAYEUX', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (254, '241420', 'TIBAU DO SUL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2570, '150796', 'TERRA ALTA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2656, '330411', 'PORTO REAL', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2515, '251560', 'SERRA DA RAIZ', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4955, '510270', 'CANARANA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3140, '431164', 'LINHA NOVA', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1071, '251274', 'RIACHAO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3596, '420519', 'ERMO', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3333, '430237', 'BOM PROGRESSO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (517, '241340', 'SERRA NEGRA DO NORTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1943, '260545', 'FERNANDO DE NORONHA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2504, '240970', 'PEDRO AVELINO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4049, '241000', 'PILOES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (271, '251430', 'SAO JOSE DE CAIANA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4801, '280230', 'FREI PAULO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4207, '270340', 'JACARE DOS HOMENS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (516, '241335', 'SERRA DO MEL', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4928, '500520', 'LADARIO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2251, '411380', 'LUPIONOPOLIS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3447, '171550', 'OLIVEIRA DE FATIMA', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (70, '221140', 'VARZEA GRANDE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1704, '420920', 'LACERDOPOLIS', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2959, '292680', 'RIO DO ANTONIO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (548, '250340', 'CACIMBA DE AREIA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4679, '240240', 'CARNAUBA DOS DANTAS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2369, '251300', 'SALGADINHO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4441, '220410', 'FRANCISCO AYRES', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2942, '292420', 'PEDRO ALEXANDRE', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4886, '310140', 'ALBERTINA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5122, '431033', 'IMBE', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (999, '291835', 'JOAO DOURADO', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2927, '291185', 'HELIOPOLIS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3224, '420475', 'CUNHATAI', 28);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1971, '261560', 'TRINDADE', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (386, '220595', 'MARCOLANDIA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5551, '500110', 'AQUIDAUANA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3358, '313630', 'JOAO PINHEIRO', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4451, '220600', 'MARCOS PARENTE', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1896, '250710', 'ITAPOROROCA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2697, '350470', 'BALBINOS', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3270, '430057', 'ALTO FELIZ', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1054, '250950', 'MONTADAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (810, '350060', 'AGUAS DE SAO PEDRO', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (835, '351060', 'CARAPICUIBA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1710, '312737', 'GOIABEIRA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (251, '241142', 'SANTANA DO SERIDO', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2348, '240120', 'ARES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2358, '250073', 'AMPARO', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2953, '292610', 'RETIROLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (507, '241160', 'SAO BENTO DO NORTE', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1575, '220970', 'SAO FRANCISCO DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1409, '314660', 'PAIVA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5296, '520840', 'GOIANAPOLIS', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5604, '500627', 'PARAISO DAS AGUAS', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3660, '430165', 'BARAO', 2);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5180, '510785', 'SAO FELIX DO ARAGUAIA', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2394, '270820', 'SAO BRAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (601, '280620', 'SALGADO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1051, '250920', 'MASSARANDUBA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4182, '172030', 'SAO SEBASTIAO DO TOCANTINS', 31);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4796, '280010', 'AMPARO DE SAO FRANCISCO', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2541, '150442', 'MARITUBA', 18);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (551, '250375', 'CAJAZEIRINHAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (139, '353040', 'MIRASSOLANDIA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (529, '250050', 'ALAGOINHA', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1019, '292130', 'MILAGRES', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2097, '130270', 'MANICORE', 7);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2058, '293060', 'SERROLANDIA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4549, '500690', 'PORTO MURTINHO', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3147, '500840', 'VICENTINA', 16);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1345, '412080', 'QUATRO BARRAS', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (763, '292975', 'SAUBARA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1122, '270720', 'POCO DAS TRINCHEIRAS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3369, '314015', 'MARIO CAMPOS', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1059, '251070', 'PASSAGEM', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3536, '352585', 'JUMIRIM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2647, '330245', 'MACUCO', 4);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3509, '352060', 'INDIANA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3553, '352940', 'MAUA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2762, '355635', 'VARGEM', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (249, '240960', 'PEDRA PRETA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1125, '270770', 'RIO LARGO', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3174, '221170', 'WALL FERRAZ', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (943, '290810', 'COCOS', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (122, '230790', 'MARTINOPOLE', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4060, '241355', 'SERRINHA DOS PINTOS', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (407, '312920', 'HELIODORA', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3061, '260760', 'ILHA DE ITAMARACA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1934, '260250', 'BREJINHO', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3473, '351400', 'DOBRADA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4806, '280430', 'MURIBECA', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (329, '220070', 'ANISIO DE ABREU', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2526, '270250', 'DOIS RIACHOS', 6);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1294, '410710', 'DIAMANTE DO NORTE', 22);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2378, '261030', 'PARANATAMA', 3);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1870, '292560', 'PRESIDENTE DUTRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (380, '220554', 'LAGOINHA DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (944, '290820', 'CONCEICAO DA FEIRA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (98, '230428', 'EUSEBIO', 10);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4692, '240700', 'LUIS GOMES', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3780, '353625', 'PARISI', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3772, '353284', 'NOVA CANAA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4815, '290030', 'ACAJUTIBA', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2502, '240510', 'JANDAIRA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4329, '510792', 'SORRISO', 17);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (583, '280250', 'GENERAL MAYNARD', 29);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3159, '314053', 'MARTINS SOARES', 15);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3168, '250120', 'AREIAL', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2707, '350640', 'BILAC', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3917, '352090', 'IPAUSSU', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (1477, '355650', 'VARZEA PAULISTA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (398, '220755', 'PAQUETA', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5591, '250115', 'AREIA DE BARAUNAS', 19);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (328, '220060', 'ANGICAL DO PIAUI', 21);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2701, '350530', 'BARRA BONITA', 30);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (4695, '240820', 'NISIA FLORESTA', 1);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (2928, '291330', 'ICHU', 9);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (5486, '521710', 'PIRACANJUBA', 13);
INSERT INTO base_municipio (id, codigo, nome, estado_id) VALUES (3143, '431820', 'SAO FRANCISCO DE PAULA', 2);
''')
]
| 100.741416
| 123
| 0.724127
| 83,611
| 569,189
| 4.795135
| 0.182189
| 0.14045
| 0.19663
| 0.321486
| 0.731681
| 0.731681
| 0.731681
| 0.731681
| 0.729189
| 0.729189
| 0
| 0.130112
| 0.127281
| 569,189
| 5,649
| 124
| 100.759249
| 0.676995
| 0.000037
| 0
| 0
| 0
| 0.995214
| 0.999596
| 0.000037
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.0039
| 0.000355
| 0
| 0.000886
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d7d65068211446f8515328894bb1ef1ab08df445
| 1,574
|
py
|
Python
|
l3coefs_proc.py
|
RobbinBouwmeester/CALLC_evaluation
|
0125ed88b767c305261cf5731c671f890bfacadd
|
[
"Apache-2.0"
] | 2
|
2020-01-23T09:51:37.000Z
|
2020-04-23T00:28:16.000Z
|
l3coefs_proc.py
|
RobbinBouwmeester/CALLC_evaluation
|
0125ed88b767c305261cf5731c671f890bfacadd
|
[
"Apache-2.0"
] | null | null | null |
l3coefs_proc.py
|
RobbinBouwmeester/CALLC_evaluation
|
0125ed88b767c305261cf5731c671f890bfacadd
|
[
"Apache-2.0"
] | null | null | null |
def aggregate_coefs(in_path, out_path):
    """Sum coefficient scores per (model, feature) pair and write a TSV.

    Each input line is tab-separated with three columns. The model key is
    column 0 with its last two underscore-separated tokens dropped; the
    feature key is column 1 with its last token dropped; column 2 is a
    float score. Scores are summed per (model, feature) and each pair is
    written to *out_path* as one tab-separated line: model, feature, sum.

    Parameters:
        in_path:  path of the tab-separated coefficient file to read.
        out_path: path of the aggregated TSV file to write (overwritten).
    """
    coefs_dict = {}
    # 'with' guarantees the files are closed (the original leaked infile).
    with open(in_path) as infile:
        for line in infile:
            split_line = line.strip().split("\t")
            mods = "_".join(split_line[0].split("_")[:-2])
            mods_feat = "_".join(split_line[1].split("_")[:-1])
            score = float(split_line[2])
            # setdefault/get replace the original's duplicated membership
            # checks (one of which was a dead branch: after creating the
            # empty inner dict, the inner 'in' test could never be true).
            feat_scores = coefs_dict.setdefault(mods, {})
            feat_scores[mods_feat] = feat_scores.get(mods_feat, 0.0) + score
    with open(out_path, "w") as outfile:
        for mods, feat_scores in coefs_dict.items():
            for mods_feat, total in feat_scores.items():
                # Progress/debug output; the original also dumped the whole
                # inner dict on every row, which was pure noise.
                print(mods, mods_feat, total)
                outfile.write("%s\t%s\t%s\n" % (mods, mods_feat, total))


if __name__ == "__main__":
    # Same two passes as the original script (it was copy-pasted twice):
    # the de-duplicated and the duplicated coefficient files.
    aggregate_coefs("data/coefs/L3_coefs_nodup.csv",
                    "data/coefs/L3_coefs_proc_nodup.csv")
    aggregate_coefs("data/coefs/L3_coefs_dup.csv",
                    "data/coefs/L3_coefs_proc_dup.csv")
| 25.803279
| 56
| 0.658196
| 266
| 1,574
| 3.669173
| 0.146617
| 0.184426
| 0.186475
| 0.139344
| 0.983607
| 0.983607
| 0.930328
| 0.930328
| 0.860656
| 0.860656
| 0
| 0.019403
| 0.148666
| 1,574
| 61
| 57
| 25.803279
| 0.708955
| 0
| 0
| 0.916667
| 0
| 0
| 0.101587
| 0.07746
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d7dcc0ec8073cadec58ed0b2f2243253cbd913db
| 300
|
py
|
Python
|
tests/model/ddl/conftest.py
|
agdsn/pycroft
|
ea771141d59c88fdb8a782eafbe106240550a33a
|
[
"Apache-2.0"
] | 18
|
2016-04-20T19:00:56.000Z
|
2021-12-19T16:43:57.000Z
|
tests/model/ddl/conftest.py
|
agdsn/pycroft
|
ea771141d59c88fdb8a782eafbe106240550a33a
|
[
"Apache-2.0"
] | 461
|
2016-07-20T00:42:59.000Z
|
2022-03-25T17:03:07.000Z
|
tests/model/ddl/conftest.py
|
agdsn/pycroft
|
ea771141d59c88fdb8a782eafbe106240550a33a
|
[
"Apache-2.0"
] | 15
|
2016-07-15T18:46:43.000Z
|
2021-03-17T20:08:39.000Z
|
import pytest
from tests.model.ddl import create_table
@pytest.fixture(scope='session')
def table():
    """Session-scoped fixture: a table named 'test', built once per session."""
    test_table = create_table('test')
    return test_table
@pytest.fixture(scope='session')
def table2():
    """Session-scoped fixture: a second table named 'test2'."""
    second_table = create_table('test2')
    return second_table
@pytest.fixture(scope='session')
def table3():
    """Session-scoped fixture: a third table named 'test3'."""
    third_table = create_table('test3')
    return third_table
| 15.789474
| 40
| 0.72
| 39
| 300
| 5.435897
| 0.461538
| 0.207547
| 0.254717
| 0.353774
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.133333
| 300
| 18
| 41
| 16.666667
| 0.8
| 0
| 0
| 0.272727
| 0
| 0
| 0.116667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| true
| 0
| 0.181818
| 0.272727
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d7ed52fa9552f645a82c7fd0b4d2cca8adbb5e17
| 184
|
py
|
Python
|
theo/framework.py
|
TheodoreWon/python-library
|
0ef99eec503970cf04d0ab89596faee791b27a09
|
[
"MIT"
] | 3
|
2018-12-04T02:49:00.000Z
|
2018-12-08T03:02:46.000Z
|
theo/framework.py
|
TheodoreWon/python-library
|
0ef99eec503970cf04d0ab89596faee791b27a09
|
[
"MIT"
] | null | null | null |
theo/framework.py
|
TheodoreWon/python-library
|
0ef99eec503970cf04d0ab89596faee791b27a09
|
[
"MIT"
] | 1
|
2018-12-08T03:02:29.000Z
|
2018-12-08T03:02:29.000Z
|
from theo.src.framework.DictList import DictList
from theo.src.framework.Log import Log
from theo.src.framework.Component import Component
from theo.src.framework.System import System
| 36.8
| 50
| 0.847826
| 28
| 184
| 5.571429
| 0.321429
| 0.205128
| 0.282051
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 184
| 4
| 51
| 46
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d7f5ab5e6b2b1514c04ba6b5b313f77045b944b3
| 163
|
py
|
Python
|
moto/compat.py
|
stj/moto
|
502957f1f9560cb6dff75271e6812498f4ff7bba
|
[
"Apache-2.0"
] | 3
|
2020-08-04T20:29:41.000Z
|
2020-11-09T09:28:19.000Z
|
moto/compat.py
|
stj/moto
|
502957f1f9560cb6dff75271e6812498f4ff7bba
|
[
"Apache-2.0"
] | null | null | null |
moto/compat.py
|
stj/moto
|
502957f1f9560cb6dff75271e6812498f4ff7bba
|
[
"Apache-2.0"
] | 1
|
2021-03-01T08:48:09.000Z
|
2021-03-01T08:48:09.000Z
|
# Compatibility shim: expose OrderedDict regardless of Python version.
try:
    from collections import OrderedDict  # noqa
except ImportError:
    # python 2.6 or earlier, use backport ("ordereddict" package on PyPI)
    from ordereddict import OrderedDict  # noqa
| 27.166667
| 47
| 0.736196
| 20
| 163
| 6
| 0.75
| 0.283333
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015748
| 0.220859
| 163
| 5
| 48
| 32.6
| 0.929134
| 0.276074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc13a9f8f79176eea122bfb4b31773959bef68e3
| 91
|
py
|
Python
|
wayremap/__init__.py
|
acro5piano/wayremap
|
6240f61bdbcede68c90f5818da7f730b69bb484a
|
[
"MIT"
] | 44
|
2021-12-09T19:42:13.000Z
|
2022-03-18T10:26:52.000Z
|
wayremap/__init__.py
|
acro5piano/wayremap
|
6240f61bdbcede68c90f5818da7f730b69bb484a
|
[
"MIT"
] | 6
|
2021-12-11T07:02:39.000Z
|
2022-02-19T05:16:45.000Z
|
wayremap/__init__.py
|
acro5piano/wayremap
|
6240f61bdbcede68c90f5818da7f730b69bb484a
|
[
"MIT"
] | null | null | null |
from wayremap.main import *
from wayremap.config import *
from wayremap.constants import *
| 22.75
| 32
| 0.802198
| 12
| 91
| 6.083333
| 0.5
| 0.493151
| 0.493151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131868
| 91
| 3
| 33
| 30.333333
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc1d8ecff3ee4958410c383f9567d7e00915ba60
| 147
|
py
|
Python
|
gym-example/gym_example/envs/__init__.py
|
psimarro/gym_example
|
13a7278ea817b00c80f723893dd3a51da49e8119
|
[
"MIT"
] | null | null | null |
gym-example/gym_example/envs/__init__.py
|
psimarro/gym_example
|
13a7278ea817b00c80f723893dd3a51da49e8119
|
[
"MIT"
] | null | null | null |
gym-example/gym_example/envs/__init__.py
|
psimarro/gym_example
|
13a7278ea817b00c80f723893dd3a51da49e8119
|
[
"MIT"
] | null | null | null |
from gym_example.envs.example_env import Example_v0
from gym_example.envs.fail1 import Fail_v1
from gym_example.envs.kvazaar_env import Kvazaar_v0
| 36.75
| 51
| 0.877551
| 26
| 147
| 4.653846
| 0.423077
| 0.173554
| 0.347107
| 0.446281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02963
| 0.081633
| 147
| 3
| 52
| 49
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0be5e1dcfb2b96dd1ad75a45be21c347367c6fe7
| 314,819
|
py
|
Python
|
docusign_esign/apis/templates_api.py
|
hunk/docusign-python-client
|
a643c42c1236715e74eef6fc279a1b29da1b5455
|
[
"MIT"
] | null | null | null |
docusign_esign/apis/templates_api.py
|
hunk/docusign-python-client
|
a643c42c1236715e74eef6fc279a1b29da1b5455
|
[
"MIT"
] | null | null | null |
docusign_esign/apis/templates_api.py
|
hunk/docusign-python-client
|
a643c42c1236715e74eef6fc279a1b29da1b5455
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..client.configuration import Configuration
from ..client.api_client import ApiClient
class TemplatesApi(object):
    """
    Client for the DocuSign eSignature REST API v2.1 Templates endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
def __init__(self, api_client=None):
    """
    Bind this API wrapper to a transport client.

    :param api_client: client used for HTTP calls; when omitted (falsy),
        the client held by the global Configuration is used, creating one
        first if the Configuration does not have one yet.
    """
    # Configuration() is always constructed, mirroring the generated
    # client's behaviour, even when an explicit api_client is supplied.
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_custom_fields(self, account_id, template_id, **kwargs):
    """
    Creates custom document fields in an existing template document.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: CustomFields, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_custom_fields_with_http_info(account_id, template_id, **kwargs)
    data = self.create_custom_fields_with_http_info(account_id, template_id, **kwargs)
    return data
def create_custom_fields_with_http_info(self, account_id, template_id, **kwargs):
    """
    Creates custom document fields in an existing template document.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_custom_fields_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields:
    :return: CustomFields
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'template_id', 'template_custom_fields']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args (and 'kwargs' itself) into a dict,
    # then merge **kwargs in, rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_custom_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_custom_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_custom_fields`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/custom_fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'template_custom_fields' in params:
        body_params = params['template_custom_fields']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CustomFields',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_document_fields(self, account_id, document_id, template_id, **kwargs):
    """
    Creates custom document fields in an existing template document.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: DocumentFieldsInformation, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_document_fields_with_http_info(account_id, document_id, template_id, **kwargs)
    data = self.create_document_fields_with_http_info(account_id, document_id, template_id, **kwargs)
    return data
def create_document_fields_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Creates custom document fields in an existing template document.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_document_fields_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information:
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'document_id', 'template_id', 'document_fields_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args into a dict, then merge **kwargs in,
    # rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_document_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_document_fields`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `create_document_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_document_fields`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'document_fields_information' in params:
        body_params = params['document_fields_information']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentFieldsInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_edit_view(self, account_id, template_id, **kwargs):
    """
    Provides a URL to start an edit view of the Template UI.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param ReturnUrlRequest return_url_request: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: ViewUrl, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_edit_view_with_http_info(account_id, template_id, **kwargs)
    data = self.create_edit_view_with_http_info(account_id, template_id, **kwargs)
    return data
def create_edit_view_with_http_info(self, account_id, template_id, **kwargs):
    """
    Provides a URL to start an edit view of the Template UI.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_edit_view_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param ReturnUrlRequest return_url_request:
    :return: ViewUrl
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'template_id', 'return_url_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args into a dict, then merge **kwargs in,
    # rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_edit_view" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_edit_view`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_edit_view`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/views/edit'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'return_url_request' in params:
        body_params = params['return_url_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ViewUrl',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_lock(self, account_id, template_id, **kwargs):
    """
    Lock a template.

    Locks the specified template and sets the time until the lock expires,
    preventing other users or recipients from accessing and changing the
    template. Note: the user must have envelope locking capability enabled
    (userSetting `canLockEnvelopes` set to **true**).

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: LockInformation, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_lock_with_http_info(account_id, template_id, **kwargs)
    data = self.create_lock_with_http_info(account_id, template_id, **kwargs)
    return data
def create_lock_with_http_info(self, account_id, template_id, **kwargs):
    """
    Lock a template.

    Locks the specified template, and sets the time until the lock expires, to prevent other users or recipients from accessing and changing the template. ###### Note: Users must have envelope locking capability enabled to use this function (the userSetting property `canLockEnvelopes` must be set to **true** for the user).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_lock_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request:
    :return: LockInformation
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'template_id', 'lock_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args into a dict, then merge **kwargs in,
    # rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_lock" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_lock`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_lock`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/lock'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'lock_request' in params:
        body_params = params['lock_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='LockInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_recipients(self, account_id, template_id, **kwargs):
    """
    Adds one or more recipients to a template.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str resend_envelope: (optional)
    :param TemplateRecipients template_recipients: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: Recipients, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_recipients_with_http_info(account_id, template_id, **kwargs)
    data = self.create_recipients_with_http_info(account_id, template_id, **kwargs)
    return data
def create_recipients_with_http_info(self, account_id, template_id, **kwargs):
    """
    Adds one or more recipients to a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_recipients_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str resend_envelope:
    :param TemplateRecipients template_recipients:
    :return: Recipients
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'template_id', 'resend_envelope', 'template_recipients']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args into a dict, then merge **kwargs in,
    # rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_recipients" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_recipients`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_recipients`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    # resend_envelope rides in the query string, not the body.
    if 'resend_envelope' in params:
        query_params['resend_envelope'] = params['resend_envelope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'template_recipients' in params:
        body_params = params['template_recipients']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Recipients',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_tabs(self, account_id, recipient_id, template_id, **kwargs):
    """
    Adds one or more tabs for a recipient.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: Tabs, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_tabs_with_http_info(account_id, recipient_id, template_id, **kwargs)
    data = self.create_tabs_with_http_info(account_id, recipient_id, template_id, **kwargs)
    return data
def create_tabs_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Adds one or more tabs for a recipient.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_tabs_with_http_info(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    # Accepted arguments: endpoint parameters plus the transport-control
    # kwargs shared by every generated method.
    all_params = ['account_id', 'recipient_id', 'template_id', 'template_tabs']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional args into a dict, then merge **kwargs in,
    # rejecting unknown names.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_tabs`")
    # verify the required parameter 'recipient_id' is set
    if ('recipient_id' not in params) or (params['recipient_id'] is None):
        raise ValueError("Missing the required parameter `recipient_id` when calling `create_tabs`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_tabs`")
    collection_formats = {}
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/tabs'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'recipient_id' in params:
        path_params['recipientId'] = params['recipient_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'template_tabs' in params:
        body_params = params['template_tabs']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Tabs',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_template(self, account_id, **kwargs):
    """
    Creates a template definition using a multipart request.

    The template's `emailSubject` may contain recipient merge fields of the
    form [[<roleName>_UserName]] or [[<roleName>_Email]]; DocuSign replaces
    them with the recipient's name/email when an envelope is sent from the
    template (subject lines are truncated at 100 characters after merging).

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param EnvelopeTemplate envelope_template: request body (optional)
    :param callback: function invoked with the response (optional)
    :return: TemplateSummary, or the request thread when called asynchronously
    """
    # This convenience wrapper always returns just the response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous path: the _with_http_info variant returns the thread.
        return self.create_template_with_http_info(account_id, **kwargs)
    data = self.create_template_with_http_info(account_id, **kwargs)
    return data
def create_template_with_http_info(self, account_id, **kwargs):
    """
    Creates a template.

    Creates a template definition using a multipart request. The
    `emailSubject` property may contain recipient merge fields of the form
    `[[<roleName>_UserName]]` or `[[<roleName>_Email]]` (where `<roleName>`
    is the recipient's `roleName` in the template); they are resolved to the
    recipient's name/email when an envelope is sent from the template.
    Note: if merging pushes the subject past 100 characters, the excess is
    truncated, so long merge fields are best placed at the start.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_with_http_info(account_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param EnvelopeTemplate envelope_template: Template definition sent as the request body. (optional)
    :return: TemplateSummary
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if `account_id` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'envelope_template']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far (arguments plus `all_params`);
    # validated kwargs are folded into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_template`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional template definition becomes the POST body.
    body_params = None
    if 'envelope_template' in params:
        body_params = params['envelope_template']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateSummary',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_template_document_responsive_html_preview(self, account_id, document_id, template_id, **kwargs):
    """
    Post Responsive HTML Preview for a document in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_document_responsive_html_preview(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentHtmlDefinition document_html_definition: (optional)
    :return: DocumentHtmlDefinitions
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.create_template_document_responsive_html_preview_with_http_info(
            account_id, document_id, template_id, **kwargs)
    response = self.create_template_document_responsive_html_preview_with_http_info(
        account_id, document_id, template_id, **kwargs)
    return response
def create_template_document_responsive_html_preview_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Post Responsive HTML Preview for a document in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_document_responsive_html_preview_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentHtmlDefinition document_html_definition: HTML definition sent as the request body. (optional)
    :return: DocumentHtmlDefinitions
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'document_id', 'template_id', 'document_html_definition']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template_document_responsive_html_preview" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_template_document_responsive_html_preview`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `create_template_document_responsive_html_preview`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_template_document_responsive_html_preview`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/responsive_html_preview'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional HTML definition becomes the POST body.
    body_params = None
    if 'document_html_definition' in params:
        body_params = params['document_html_definition']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentHtmlDefinitions',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_template_document_tabs(self, account_id, document_id, template_id, **kwargs):
    """
    Adds the tabs to a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_document_tabs(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs: (optional)
    :return: Tabs
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.create_template_document_tabs_with_http_info(
            account_id, document_id, template_id, **kwargs)
    response = self.create_template_document_tabs_with_http_info(
        account_id, document_id, template_id, **kwargs)
    return response
def create_template_document_tabs_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Adds the tabs to a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_document_tabs_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs: Tab definitions sent as the request body. (optional)
    :return: Tabs
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'document_id', 'template_id', 'template_tabs']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template_document_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_template_document_tabs`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `create_template_document_tabs`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_template_document_tabs`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/tabs'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional tab definitions become the POST body.
    body_params = None
    if 'template_tabs' in params:
        body_params = params['template_tabs']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Tabs',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_template_recipient_preview(self, account_id, template_id, **kwargs):
    """
    Provides a URL to start a recipient view of the Envelope UI.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_recipient_preview(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param RecipientPreviewRequest recipient_preview_request: (optional)
    :return: ViewUrl
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.create_template_recipient_preview_with_http_info(
            account_id, template_id, **kwargs)
    response = self.create_template_recipient_preview_with_http_info(
        account_id, template_id, **kwargs)
    return response
def create_template_recipient_preview_with_http_info(self, account_id, template_id, **kwargs):
    """
    Provides a URL to start a recipient view of the Envelope UI.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_recipient_preview_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param RecipientPreviewRequest recipient_preview_request: Preview request sent as the request body. (optional)
    :return: ViewUrl
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'template_id', 'recipient_preview_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template_recipient_preview" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_template_recipient_preview`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_template_recipient_preview`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/views/recipient_preview'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional preview request becomes the POST body.
    body_params = None
    if 'recipient_preview_request' in params:
        body_params = params['recipient_preview_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ViewUrl',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_template_responsive_html_preview(self, account_id, template_id, **kwargs):
    """
    Get Responsive HTML Preview for all documents in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_responsive_html_preview(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentHtmlDefinition document_html_definition: (optional)
    :return: DocumentHtmlDefinitions
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.create_template_responsive_html_preview_with_http_info(
            account_id, template_id, **kwargs)
    response = self.create_template_responsive_html_preview_with_http_info(
        account_id, template_id, **kwargs)
    return response
def create_template_responsive_html_preview_with_http_info(self, account_id, template_id, **kwargs):
    """
    Get Responsive HTML Preview for all documents in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_template_responsive_html_preview_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentHtmlDefinition document_html_definition: HTML definition sent as the request body. (optional)
    :return: DocumentHtmlDefinitions
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'template_id', 'document_html_definition']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template_responsive_html_preview" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `create_template_responsive_html_preview`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `create_template_responsive_html_preview`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/responsive_html_preview'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional HTML definition becomes the POST body.
    body_params = None
    if 'document_html_definition' in params:
        body_params = params['document_html_definition']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentHtmlDefinitions',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_bulk_recipients(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the bulk recipient list on a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_bulk_recipients(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: BulkRecipientsUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.delete_bulk_recipients_with_http_info(
            account_id, recipient_id, template_id, **kwargs)
    response = self.delete_bulk_recipients_with_http_info(
        account_id, recipient_id, template_id, **kwargs)
    return response
def delete_bulk_recipients_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the bulk recipient list on a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_bulk_recipients_with_http_info(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: BulkRecipientsUpdateResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'recipient_id', 'template_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_bulk_recipients" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_bulk_recipients`")
    # verify the required parameter 'recipient_id' is set
    if ('recipient_id' not in params) or (params['recipient_id'] is None):
        raise ValueError("Missing the required parameter `recipient_id` when calling `delete_bulk_recipients`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_bulk_recipients`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/bulk_recipients'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'recipient_id' in params:
        path_params['recipientId'] = params['recipient_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request carries no body for this endpoint.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='BulkRecipientsUpdateResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_custom_fields(self, account_id, template_id, **kwargs):
    """
    Deletes envelope custom fields in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_custom_fields(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields: (optional)
    :return: CustomFields
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.delete_custom_fields_with_http_info(
            account_id, template_id, **kwargs)
    response = self.delete_custom_fields_with_http_info(
        account_id, template_id, **kwargs)
    return response
def delete_custom_fields_with_http_info(self, account_id, template_id, **kwargs):
    """
    Deletes envelope custom fields in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_custom_fields_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields: Fields to delete, sent as the request body. (optional)
    :return: CustomFields
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is rejected.
    all_params = ['account_id', 'template_id', 'template_custom_fields']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the locals defined so far; validated kwargs are folded in.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_custom_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_custom_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_custom_fields`")

    collection_formats = {}

    # `{format}` placeholder is legacy generator output; this API is JSON-only.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/custom_fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional field list becomes the DELETE body (this endpoint accepts one).
    body_params = None
    if 'template_custom_fields' in params:
        body_params = params['template_custom_fields']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting (none at this layer; auth is handled by the client config)
    auth_settings = []

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CustomFields',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_document_fields(self, account_id, document_id, template_id, **kwargs):
    """
    Deletes custom document fields from an existing template document.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_document_fields(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information: (optional)
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always return just the deserialized data, not (data, status, headers).
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: the delegate returns the request thread.
        return self.delete_document_fields_with_http_info(
            account_id, document_id, template_id, **kwargs)
    response = self.delete_document_fields_with_http_info(
        account_id, document_id, template_id, **kwargs)
    return response
def delete_document_fields_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Deletes custom document fields from an existing template document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_document_fields_with_http_info(account_id, document_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information: Optional request body naming the fields to delete.
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'document_id', 'template_id', 'document_fields_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_document_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_document_fields`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `delete_document_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_document_fields`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body describing which fields to remove.
    body_params = None
    if 'document_fields_information' in params:
        body_params = params['document_fields_information']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentFieldsInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_document_page(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Deletes a page from a document in a template.

    Deletes a page from a document in a template based on the page number.
    Synchronous by default; supply a `callback` keyword function to run the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_document_page(account_id, document_id, page_number, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param PageRequest page_request:
    :return: None
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_document_page_with_http_info(account_id, document_id, page_number, template_id, **kwargs)
def delete_document_page_with_http_info(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Deletes a page from a document in a template.
    Deletes a page from a document in a template based on the page number.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_document_page_with_http_info(account_id, document_id, page_number, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param PageRequest page_request: Optional request body for the page deletion.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'document_id', 'page_number', 'template_id', 'page_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_document_page" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_document_page`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `delete_document_page`")
    # verify the required parameter 'page_number' is set
    if ('page_number' not in params) or (params['page_number'] is None):
        raise ValueError("Missing the required parameter `page_number` when calling `delete_document_page`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_document_page`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/pages/{pageNumber}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'page_number' in params:
        path_params['pageNumber'] = params['page_number']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body; the endpoint returns no payload.
    body_params = None
    if 'page_request' in params:
        body_params = params['page_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_documents(self, account_id, template_id, **kwargs):
    """
    Deletes documents from a template.

    Deletes one or more documents from an existing template. Synchronous by
    default; supply a `callback` keyword function to run the request
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_documents(account_id, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeDefinition envelope_definition:
    :return: TemplateDocumentsResult
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_documents_with_http_info(account_id, template_id, **kwargs)
def delete_documents_with_http_info(self, account_id, template_id, **kwargs):
    """
    Deletes documents from a template.
    Deletes one or more documents from an existing template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_documents_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeDefinition envelope_definition: Optional request body listing the documents to delete.
    :return: TemplateDocumentsResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'template_id', 'envelope_definition']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_documents" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_documents`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_documents`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body describing which documents to remove.
    body_params = None
    if 'envelope_definition' in params:
        body_params = params['envelope_definition']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateDocumentsResult',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_group_share(self, account_id, template_id, template_part, **kwargs):
    """
    Removes a member group's sharing permissions for a template.

    Removes a member group's sharing permissions for a specified template.
    Synchronous by default; supply a `callback` keyword function to run the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_share(account_id, template_id, template_part, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str template_part: Currently, the only defined part is **groups**. (required)
    :param GroupInformation group_information:
    :return: GroupInformation
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_group_share_with_http_info(account_id, template_id, template_part, **kwargs)
def delete_group_share_with_http_info(self, account_id, template_id, template_part, **kwargs):
    """
    Removes a member group's sharing permissions for a template.
    Removes a member group's sharing permissions for a specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_share_with_http_info(account_id, template_id, template_part, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str template_part: Currently, the only defined part is **groups**. (required)
    :param GroupInformation group_information: Optional request body naming the groups to unshare.
    :return: GroupInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'template_id', 'template_part', 'group_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_group_share" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_group_share`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_group_share`")
    # verify the required parameter 'template_part' is set
    if ('template_part' not in params) or (params['template_part'] is None):
        raise ValueError("Missing the required parameter `template_part` when calling `delete_group_share`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/{templatePart}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    if 'template_part' in params:
        path_params['templatePart'] = params['template_part']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body naming the member groups involved.
    body_params = None
    if 'group_information' in params:
        body_params = params['group_information']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='GroupInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_lock(self, account_id, template_id, **kwargs):
    """
    Deletes a template lock.

    Deletes the lock from the specified template. The `X-DocuSign-Edit`
    header must be included in the request. Synchronous by default; supply a
    `callback` keyword function to run the request asynchronously, in which
    case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_lock(account_id, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request:
    :return: LockInformation
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_lock_with_http_info(account_id, template_id, **kwargs)
def delete_lock_with_http_info(self, account_id, template_id, **kwargs):
    """
    Deletes a template lock.
    Deletes the lock from the specified template. The `X-DocuSign-Edit` header must be included in the request.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_lock_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request: Optional request body describing the lock to release.
    :return: LockInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'template_id', 'lock_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_lock" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_lock`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_lock`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/lock'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body describing the lock request.
    body_params = None
    if 'lock_request' in params:
        body_params = params['lock_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='LockInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_recipient(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the specified recipient file from a template.

    Deletes the specified recipient file from the specified template.
    Synchronous by default; supply a `callback` keyword function to run the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_recipient(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateRecipients template_recipients:
    :return: Recipients
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_recipient_with_http_info(account_id, recipient_id, template_id, **kwargs)
def delete_recipient_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the specified recipient file from a template.
    Deletes the specified recipient file from the specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_recipient_with_http_info(account_id, recipient_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateRecipients template_recipients: Optional request body describing the recipient to delete.
    :return: Recipients
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'recipient_id', 'template_id', 'template_recipients']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_recipient" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_recipient`")
    # verify the required parameter 'recipient_id' is set
    if ('recipient_id' not in params) or (params['recipient_id'] is None):
        raise ValueError("Missing the required parameter `recipient_id` when calling `delete_recipient`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_recipient`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'recipient_id' in params:
        path_params['recipientId'] = params['recipient_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body describing the recipients involved.
    body_params = None
    if 'template_recipients' in params:
        body_params = params['template_recipients']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Recipients',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_recipients(self, account_id, template_id, **kwargs):
    """
    Deletes recipients from a template.

    Deletes one or more recipients from a template. Recipients to be deleted
    are listed in the request, with the `recipientId` being used as the key
    for deleting recipients. Synchronous by default; supply a `callback`
    keyword function to run the request asynchronously, in which case the
    request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_recipients(account_id, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateRecipients template_recipients:
    :return: Recipients
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_recipients_with_http_info(account_id, template_id, **kwargs)
def delete_recipients_with_http_info(self, account_id, template_id, **kwargs):
    """
    Deletes recipients from a template.
    Deletes one or more recipients from a template. Recipients to be deleted are listed in the request, with the `recipientId` being used as the key for deleting recipients.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_recipients_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateRecipients template_recipients: Optional request body listing the recipients to delete.
    :return: Recipients
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword names accepted by this endpoint: the API parameters plus the
    # generic request-control options understood by the generated client.
    all_params = ['account_id', 'template_id', 'template_recipients']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots the positional args plus `kwargs`; validated keyword
    # arguments are then folded into the same dict and `kwargs` dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_recipients" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `delete_recipients`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `delete_recipients`")
    collection_formats = {}
    # '{format}' is a no-op generator placeholder; the path has no such token.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE carries an optional JSON body keyed by recipientId.
    body_params = None
    if 'template_recipients' in params:
        body_params = params['template_recipients']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Recipients',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_tabs(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the tabs associated with a recipient in a template.

    Deletes one or more tabs associated with a recipient in a template.
    Synchronous by default; supply a `callback` keyword function to run the
    request asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_tabs(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs
             If the method is called asynchronously, returns the request thread.
    """
    # With this flag set, the HTTP-info variant already yields just the
    # deserialized payload (or the request thread when a callback is given),
    # so both sync and async cases reduce to a single delegation.
    kwargs['_return_http_data_only'] = True
    return self.delete_tabs_with_http_info(account_id, recipient_id, template_id, **kwargs)
def delete_tabs_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Deletes the tabs associated with a recipient in a template.

    Removes one or more tabs attached to a recipient of a template. The
    request is synchronous unless a `callback` keyword argument is given,
    in which case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_tabs_with_http_info(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'recipient_id', 'template_id',
                  'template_tabs', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `delete_tabs`")
    if params.get('recipient_id') is None:
        raise ValueError("Missing the required parameter `recipient_id` when calling `delete_tabs`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `delete_tabs`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/tabs'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('recipient_id', 'recipientId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    query_params = {}

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])}

    form_params = []
    local_var_files = {}

    # The optional TemplateTabs payload becomes the DELETE request body.
    body_params = params.get('template_tabs')

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tabs',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_template_document_tabs(self, account_id, document_id, template_id, **kwargs):
    """
    Deletes tabs from an envelope document

    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the HTTP call runs on a background thread and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_template_document_tabs(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.delete_template_document_tabs_with_http_info(account_id, document_id, template_id, **kwargs)
def delete_template_document_tabs_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Deletes tabs from an envelope document

    The request is synchronous unless a `callback` keyword argument is
    given, in which case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_template_document_tabs_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'document_id', 'template_id',
                  'template_tabs', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_template_document_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `delete_template_document_tabs`")
    if params.get('document_id') is None:
        raise ValueError("Missing the required parameter `document_id` when calling `delete_template_document_tabs`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `delete_template_document_tabs`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/tabs'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('document_id', 'documentId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    query_params = {}

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])}

    form_params = []
    local_var_files = {}

    # The optional TemplateTabs payload becomes the DELETE request body.
    body_params = params.get('template_tabs')

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tabs',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get(self, account_id, template_id, **kwargs):
    """
    Gets a list of templates for a specified account.

    Retrieves the definition of the specified template. The request is
    synchronous unless a `callback` keyword argument is supplied, in
    which case the HTTP call runs on a background thread and the callback
    is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include:
    :return: EnvelopeTemplate
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_with_http_info(account_id, template_id, **kwargs)
def get_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets a list of templates for a specified account.

    Retrieves the definition of the specified template. The request is
    synchronous unless a `callback` keyword argument is given, in which
    case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include:
    :return: EnvelopeTemplate
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'template_id', 'include', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    # Optional query-string parameters pass through under their own names.
    query_params = {name: params[name]
                    for name in ('include',) if name in params}

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])}

    form_params = []
    local_var_files = {}
    # GET request: no body.
    body_params = None

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EnvelopeTemplate',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_document(self, account_id, document_id, template_id, **kwargs):
    """
    Gets PDF documents from a template.

    Retrieves one or more PDF documents from the specified template. You
    can specify the ID of the document to retrieve or can specify
    `combined` to retrieve all documents in the template as one pdf.
    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the HTTP call runs on a background thread and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str encrypt:
    :param str show_changes:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_document_with_http_info(account_id, document_id, template_id, **kwargs)
def get_document_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Gets PDF documents from a template.

    Retrieves one or more PDF documents from the specified template. You
    can specify the ID of the document to retrieve or can specify
    `combined` to retrieve all documents in the template as one pdf.
    The request is synchronous unless a `callback` keyword argument is
    given, in which case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str encrypt:
    :param str show_changes:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'document_id', 'template_id', 'encrypt',
                  'show_changes', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_document" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_document`")
    if params.get('document_id') is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_document`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_document`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('document_id', 'documentId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    # Optional query-string parameters pass through under their own names.
    query_params = {name: params[name]
                    for name in ('encrypt', 'show_changes')
                    if name in params}

    # HTTP header `Accept` — this endpoint returns a PDF stream.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/pdf'])}

    form_params = []
    local_var_files = {}
    # GET request: no body.
    body_params = None

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='file',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_document_page_image(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Gets a page image from a template for display.

    Retrieves a page image for display from the specified template. The
    request is synchronous unless a `callback` keyword argument is
    supplied, in which case the HTTP call runs on a background thread and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document_page_image(account_id, document_id, page_number, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str dpi:
    :param str max_height:
    :param str max_width:
    :param str show_changes:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_document_page_image_with_http_info(account_id, document_id, page_number, template_id, **kwargs)
def get_document_page_image_with_http_info(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Gets a page image from a template for display.

    Retrieves a page image for display from the specified template. The
    request is synchronous unless a `callback` keyword argument is given,
    in which case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document_page_image_with_http_info(account_id, document_id, page_number, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str dpi:
    :param str max_height:
    :param str max_width:
    :param str show_changes:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'document_id', 'page_number',
                  'template_id', 'dpi', 'max_height', 'max_width',
                  'show_changes', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_document_page_image" % key
            )
        params[key] = val
    del params['kwargs']
    # All four path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_document_page_image`")
    if params.get('document_id') is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_document_page_image`")
    if params.get('page_number') is None:
        raise ValueError("Missing the required parameter `page_number` when calling `get_document_page_image`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_document_page_image`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/pages/{pageNumber}/page_image'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('document_id', 'documentId'),
                         ('page_number', 'pageNumber'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    # Optional query-string parameters pass through under their own names.
    query_params = {name: params[name]
                    for name in ('dpi', 'max_height', 'max_width',
                                 'show_changes')
                    if name in params}

    # HTTP header `Accept` — this endpoint returns a PNG image.
    header_params = {
        'Accept': self.api_client.select_header_accept(['image/png'])}

    form_params = []
    local_var_files = {}
    # GET request: no body.
    body_params = None

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='file',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_document_tabs(self, account_id, document_id, template_id, **kwargs):
    """
    Returns tabs on the document.

    The request is synchronous unless a `callback` keyword argument is
    supplied, in which case the HTTP call runs on a background thread and
    the callback is invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document_tabs(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str page_numbers:
    :return: Tabs
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_document_tabs_with_http_info(account_id, document_id, template_id, **kwargs)
def get_document_tabs_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Returns tabs on the document.

    The request is synchronous unless a `callback` keyword argument is
    given, in which case it is performed on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_document_tabs_with_http_info(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str page_numbers:
    :return: Tabs
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'document_id', 'template_id',
                  'page_numbers', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_document_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_document_tabs`")
    if params.get('document_id') is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_document_tabs`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_document_tabs`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/tabs'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('document_id', 'documentId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    # Optional query-string parameters pass through under their own names.
    query_params = {name: params[name]
                    for name in ('page_numbers',) if name in params}

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])}

    form_params = []
    local_var_files = {}
    # GET request: no body.
    body_params = None

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tabs',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_lock(self, account_id, template_id, **kwargs):
    """
    Gets template lock information.

    Retrieves general information about the template lock. If the call is
    made by the user who has the lock and the request has the same
    integrator key as original, then the `X-DocuSign-Edit` header field
    and additional lock information is included in the response. This
    allows users to recover a lost editing session token and the
    `X-DocuSign-Edit` header. The request is synchronous unless a
    `callback` keyword argument is supplied, in which case the HTTP call
    runs on a background thread and the callback is invoked with the
    response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_lock(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: LockInformation
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_lock_with_http_info(account_id, template_id, **kwargs)
def get_lock_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets template lock information.

    Retrieves general information about the template lock. If the call is
    made by the user who has the lock and the request has the same
    integrator key as original, then the `X-DocuSign-Edit` header field
    and additional lock information is included in the response. This
    allows users to recover a lost editing session token and the
    `X-DocuSign-Edit` header. The request is synchronous unless a
    `callback` keyword argument is given, in which case it is performed
    on a background thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_lock_with_http_info(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: LockInformation
             If the method is called asynchronously,
             returns the request thread.
    """
    # Everything this endpoint accepts, including the transport options
    # shared by every generated API method.
    all_params = ['account_id', 'template_id', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones into
    # the flat params dict.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_lock" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are mandatory.
    if params.get('account_id') is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_lock`")
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_lock`")

    collection_formats = {}

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/lock'.replace('{format}', 'json')
    # Map the snake_case arguments onto the camelCase URL placeholders.
    path_params = {}
    for snake, camel in (('account_id', 'accountId'),
                         ('template_id', 'templateId')):
        if snake in params:
            path_params[camel] = params[snake]

    query_params = {}

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])}

    form_params = []
    local_var_files = {}
    # GET request: no body.
    body_params = None

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='LockInformation',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_notification_settings(self, account_id, template_id, **kwargs):
    """
    Gets template notification information.

    Retrieves the envelope notification, reminders and expirations,
    information for an existing template. The request is synchronous
    unless a `callback` keyword argument is supplied, in which case the
    HTTP call runs on a background thread and the callback is invoked
    with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_notification_settings(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: Notification
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the response data
    # (or the worker thread in async mode), never the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths reduce to the same delegate call: with a
    # callback the delegate returns the request thread, without one it
    # returns the deserialized data.
    return self.get_notification_settings_with_http_info(account_id, template_id, **kwargs)
def get_notification_settings_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets template notification information.

    Retrieves the envelope notification (reminders and expirations)
    information for an existing template.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: Notification
        If the method is called asynchronously, returns the request thread.
    """
    recognized = (
        'account_id', 'template_id',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_notification_settings" % key
            )
    # Both path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_notification_settings`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_notification_settings`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/notification'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Notification',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_page_tabs(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Returns tabs on the specified page.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: Tabs
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.get_page_tabs_with_http_info(account_id, document_id, page_number, template_id, **kwargs)
def get_page_tabs_with_http_info(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Returns tabs on the specified page.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: Tabs
        If the method is called asynchronously, returns the request thread.
    """
    recognized = (
        'account_id', 'document_id', 'page_number', 'template_id',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_page_tabs" % key
            )
    # All four path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_page_tabs`")
    if document_id is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_page_tabs`")
    if page_number is None:
        raise ValueError("Missing the required parameter `page_number` when calling `get_page_tabs`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_page_tabs`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/pages/{pageNumber}/tabs'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'documentId': document_id,
        'pageNumber': page_number,
        'templateId': template_id,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Tabs',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_pages(self, account_id, document_id, template_id, **kwargs):
    """
    Returns document page image(s) based on input.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str count:
    :param str dpi:
    :param str max_height:
    :param str max_width:
    :param str nocache:
    :param str show_changes:
    :param str start_position:
    :return: PageImages
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.get_pages_with_http_info(account_id, document_id, template_id, **kwargs)
def get_pages_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Returns document page image(s) based on input.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str count:
    :param str dpi:
    :param str max_height:
    :param str max_width:
    :param str nocache:
    :param str show_changes:
    :param str start_position:
    :return: PageImages
        If the method is called asynchronously, returns the request thread.
    """
    # Optional query-string parameters accepted by this endpoint, in the
    # order they are serialized onto the URL.
    optional_query = (
        'count', 'dpi', 'max_height', 'max_width',
        'nocache', 'show_changes', 'start_position',
    )
    recognized = ('account_id', 'document_id', 'template_id') + optional_query + (
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pages" % key
            )
    # All path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_pages`")
    if document_id is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_pages`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_pages`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/pages'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'documentId': document_id,
        'templateId': template_id,
    }
    # Forward only the optional query parameters the caller supplied.
    query_params = {}
    for name in optional_query:
        if name in kwargs:
            query_params[name] = kwargs[name]
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PageImages',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_template_document_html_definitions(self, account_id, document_id, template_id, **kwargs):
    """
    Get the Original HTML Definition used to generate the Responsive HTML
    for a given document in a template.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentHtmlDefinitionOriginals
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.get_template_document_html_definitions_with_http_info(account_id, document_id, template_id, **kwargs)
def get_template_document_html_definitions_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Get the Original HTML Definition used to generate the Responsive HTML
    for a given document in a template.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentHtmlDefinitionOriginals
        If the method is called asynchronously, returns the request thread.
    """
    recognized = (
        'account_id', 'document_id', 'template_id',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_template_document_html_definitions" % key
            )
    # All path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_template_document_html_definitions`")
    if document_id is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_template_document_html_definitions`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_template_document_html_definitions`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/html_definitions'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'documentId': document_id,
        'templateId': template_id,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DocumentHtmlDefinitionOriginals',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_template_html_definitions(self, account_id, template_id, **kwargs):
    """
    Get the Original HTML Definition used to generate the Responsive HTML
    for the template.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentHtmlDefinitionOriginals
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.get_template_html_definitions_with_http_info(account_id, template_id, **kwargs)
def get_template_html_definitions_with_http_info(self, account_id, template_id, **kwargs):
    """
    Get the Original HTML Definition used to generate the Responsive HTML
    for the template.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentHtmlDefinitionOriginals
        If the method is called asynchronously, returns the request thread.
    """
    recognized = (
        'account_id', 'template_id',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_template_html_definitions" % key
            )
    # Both path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `get_template_html_definitions`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `get_template_html_definitions`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/html_definitions'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DocumentHtmlDefinitionOriginals',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_bulk_recipients(self, account_id, recipient_id, template_id, **kwargs):
    """
    Gets the bulk recipient file from a template.

    Retrieves the bulk recipient file information from a template that has
    a bulk recipient.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_tabs:
    :param str start_position:
    :return: BulkRecipientsResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.list_bulk_recipients_with_http_info(account_id, recipient_id, template_id, **kwargs)
def list_bulk_recipients_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Gets the bulk recipient file from a template.

    Retrieves the bulk recipient file information from a template that has
    a bulk recipient.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_tabs:
    :param str start_position:
    :return: BulkRecipientsResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Optional query-string parameters accepted by this endpoint, in the
    # order they are serialized onto the URL.
    optional_query = ('include_tabs', 'start_position')
    recognized = ('account_id', 'recipient_id', 'template_id') + optional_query + (
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_bulk_recipients" % key
            )
    # All path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `list_bulk_recipients`")
    if recipient_id is None:
        raise ValueError("Missing the required parameter `recipient_id` when calling `list_bulk_recipients`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `list_bulk_recipients`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/bulk_recipients'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'recipientId': recipient_id,
        'templateId': template_id,
    }
    # Forward only the optional query parameters the caller supplied.
    query_params = {}
    for name in optional_query:
        if name in kwargs:
            query_params[name] = kwargs[name]
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BulkRecipientsResponse',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_custom_fields(self, account_id, template_id, **kwargs):
    """
    Gets the custom document fields from a template.

    Retrieves the custom document field information from an existing
    template.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: CustomFields
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.list_custom_fields_with_http_info(account_id, template_id, **kwargs)
def list_custom_fields_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets the custom document fields from a template.

    Retrieves the custom document field information from an existing
    template.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a worker thread and receive the response via the callback.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: CustomFields
        If the method is called asynchronously, returns the request thread.
    """
    recognized = (
        'account_id', 'template_id',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    # Reject keyword arguments the endpoint does not understand.
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_custom_fields" % key
            )
    # Both path parameters are mandatory.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `list_custom_fields`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `list_custom_fields`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/custom_fields'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }

    # No authentication settings and no request body for this GET.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CustomFields',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_document_fields(self, account_id, document_id, template_id, **kwargs):
    """
    Gets the custom document fields for an existing template document.

    Retrieves the custom document fields for an existing template document.

    The request is synchronous by default. Supplying a `callback` keyword
    argument makes the HTTP call run asynchronously; the callback receives
    the response and the request thread is returned instead.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentFieldsInformation
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the body.
    kwargs['_return_http_data_only'] = True
    # In async mode the helper returns the request thread, otherwise the
    # deserialized data -- either way its result is exactly what we return.
    return self.list_document_fields_with_http_info(account_id, document_id, template_id, **kwargs)
def list_document_fields_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Gets the custom document fields for an existing template document.
    Retrieves the custom document fields for an existing template document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_document_fields_with_http_info(account_id, document_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'document_id', 'template_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_document_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `list_document_fields`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `list_document_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `list_document_fields`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip and response deserialization to
    # the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentFieldsInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_documents(self, account_id, template_id, **kwargs):
    """
    Gets a list of documents associated with a template.

    Retrieves a list of documents associated with the specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass a `callback` function to be invoked
    when receiving the response; the request thread is then returned
    instead of the data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_documents(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_tabs:
    :return: TemplateDocumentsResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized data;
    # the *_with_http_info variant exposes status code and headers too.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing for both modes: the
    # deserialized data for synchronous calls, the request thread when a
    # callback was supplied.
    return self.list_documents_with_http_info(account_id, template_id, **kwargs)
def list_documents_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets a list of documents associated with a template.
    Retrieves a list of documents associated with the specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_documents_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_tabs:
    :return: TemplateDocumentsResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'template_id', 'include_tabs']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_documents" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `list_documents`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `list_documents`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    # Optional filter is forwarded as a query-string parameter only when
    # the caller supplied it.
    query_params = {}
    if 'include_tabs' in params:
        query_params['include_tabs'] = params['include_tabs']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip and response deserialization to
    # the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateDocumentsResult',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_recipients(self, account_id, template_id, **kwargs):
    """
    Gets recipient information from a template.

    Retrieves the information for all recipients in the specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass a `callback` function to be invoked
    when receiving the response; the request thread is then returned
    instead of the data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_recipients(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_anchor_tab_locations: When set to **true** and `include_tabs` is set to **true**, all tabs with anchor tab properties are included in the response.
    :param str include_extended: When set to **true**, the extended properties are included in the response.
    :param str include_tabs: When set to **true**, the tab information associated with the recipient is included in the response.
    :return: Recipients
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized data;
    # the *_with_http_info variant exposes status code and headers too.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing for both modes: the
    # deserialized data for synchronous calls, the request thread when a
    # callback was supplied.
    return self.list_recipients_with_http_info(account_id, template_id, **kwargs)
def list_recipients_with_http_info(self, account_id, template_id, **kwargs):
    """
    Gets recipient information from a template.
    Retrieves the information for all recipients in the specified template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_recipients_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_anchor_tab_locations: When set to **true** and `include_tabs` is set to **true**, all tabs with anchor tab properties are included in the response.
    :param str include_extended: When set to **true**, the extended properties are included in the response.
    :param str include_tabs: When set to **true**, the tab information associated with the recipient is included in the response.
    :return: Recipients
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'template_id', 'include_anchor_tab_locations', 'include_extended', 'include_tabs']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_recipients" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `list_recipients`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `list_recipients`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    # Optional filters are forwarded as query-string parameters only when
    # the caller supplied them.
    query_params = {}
    if 'include_anchor_tab_locations' in params:
        query_params['include_anchor_tab_locations'] = params['include_anchor_tab_locations']
    if 'include_extended' in params:
        query_params['include_extended'] = params['include_extended']
    if 'include_tabs' in params:
        query_params['include_tabs'] = params['include_tabs']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip and response deserialization to
    # the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Recipients',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_tabs(self, account_id, recipient_id, template_id, **kwargs):
    """
    Gets the tabs information for a signer or sign-in-person recipient in a template.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass a `callback` function to be invoked
    when receiving the response; the request thread is then returned
    instead of the data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_tabs(account_id, recipient_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_anchor_tab_locations: When set to **true**, all tabs with anchor tab properties are included in the response.
    :param str include_metadata:
    :return: Tabs
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized data;
    # the *_with_http_info variant exposes status code and headers too.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing for both modes: the
    # deserialized data for synchronous calls, the request thread when a
    # callback was supplied.
    return self.list_tabs_with_http_info(account_id, recipient_id, template_id, **kwargs)
def list_tabs_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Gets the tabs information for a signer or sign-in-person recipient in a template.
    Gets the tabs information for a signer or sign-in-person recipient in a template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_tabs_with_http_info(account_id, recipient_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str include_anchor_tab_locations: When set to **true**, all tabs with anchor tab properties are included in the response.
    :param str include_metadata:
    :return: Tabs
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'recipient_id', 'template_id', 'include_anchor_tab_locations', 'include_metadata']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_tabs" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `list_tabs`")
    # verify the required parameter 'recipient_id' is set
    if ('recipient_id' not in params) or (params['recipient_id'] is None):
        raise ValueError("Missing the required parameter `recipient_id` when calling `list_tabs`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `list_tabs`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/tabs'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'recipient_id' in params:
        path_params['recipientId'] = params['recipient_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    # Optional filters are forwarded as query-string parameters only when
    # the caller supplied them.
    query_params = {}
    if 'include_anchor_tab_locations' in params:
        query_params['include_anchor_tab_locations'] = params['include_anchor_tab_locations']
    if 'include_metadata' in params:
        query_params['include_metadata'] = params['include_metadata']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip and response deserialization to
    # the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Tabs',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_templates(self, account_id, **kwargs):
    """
    Gets the definition of a template.

    Retrieves the list of templates for the specified account. The request
    can be limited to a specific folder. This method makes a synchronous
    HTTP request by default. To make an asynchronous HTTP request, pass a
    `callback` function to be invoked when receiving the response; the
    request thread is then returned instead of the data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_templates(account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str count: Number of records to return in the cache.
    :param str created_from_date:
    :param str created_to_date:
    :param str folder_ids: A comma separated list of folder ID GUIDs.
    :param str folder_types:
    :param str from_date: Start of the search date range. Only returns templates created on or after this date/time. If no value is specified, there is no limit on the earliest date created.
    :param str include: A comma separated list of additional template attributes to include in the response. Valid values are: recipients, folders, documents, custom_fields, and notifications.
    :param str is_download:
    :param str modified_from_date:
    :param str modified_to_date:
    :param str order: Sets the direction order used to sort the list. Valid values are: -asc = ascending sort order (a to z) -desc = descending sort order (z to a)
    :param str order_by: Sets the file attribute used to sort the list. Valid values are: -name: template name -modified: date/time template was last modified. -used: date/time the template was last used.
    :param str search_fields:
    :param str search_text: The search text used to search the names of templates.
    :param str shared_by_me: If true, the response only includes templates shared by the user. If false, the response only returns template not shared by the user. If not specified, the response is not affected.
    :param str start_position: The starting index for the first template shown in the response. This must be greater than or equal to 0 (zero).
    :param str template_ids:
    :param str to_date: End of the search date range. Only returns templates created up to this date/time. If no value is provided, this defaults to the current date.
    :param str used_from_date: Start of the search date range. Only returns templates used or edited on or after this date/time. If no value is specified, there is no limit on the earliest date used.
    :param str used_to_date: End of the search date range. Only returns templates used or edited up to this date/time. If no value is provided, this defaults to the current date.
    :param str user_filter: Sets if the templates shown in the response Valid values are: -owned_by_me: only shows templates the user owns. -shared_with_me: only shows templates that are shared with the user. -all: shows all templates owned or shared with the user.
    :param str user_id:
    :return: EnvelopeTemplateResults
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized data;
    # the *_with_http_info variant exposes status code and headers too.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing for both modes: the
    # deserialized data for synchronous calls, the request thread when a
    # callback was supplied.
    return self.list_templates_with_http_info(account_id, **kwargs)
def list_templates_with_http_info(self, account_id, **kwargs):
    """
    Gets the definition of a template.
    Retrieves the list of templates for the specified account. The request can be limited to a specific folder.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_templates_with_http_info(account_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str count: Number of records to return in the cache.
    :param str created_from_date:
    :param str created_to_date:
    :param str folder_ids: A comma separated list of folder ID GUIDs.
    :param str folder_types:
    :param str from_date: Start of the search date range. Only returns templates created on or after this date/time. If no value is specified, there is no limit on the earliest date created.
    :param str include: A comma separated list of additional template attributes to include in the response. Valid values are: recipients, folders, documents, custom_fields, and notifications.
    :param str is_download:
    :param str modified_from_date:
    :param str modified_to_date:
    :param str order: Sets the direction order used to sort the list. Valid values are: -asc = ascending sort order (a to z) -desc = descending sort order (z to a)
    :param str order_by: Sets the file attribute used to sort the list. Valid values are: -name: template name -modified: date/time template was last modified. -used: date/time the template was last used.
    :param str search_fields:
    :param str search_text: The search text used to search the names of templates.
    :param str shared_by_me: If true, the response only includes templates shared by the user. If false, the response only returns template not shared by the user. If not specified, the response is not affected.
    :param str start_position: The starting index for the first template shown in the response. This must be greater than or equal to 0 (zero).
    :param str template_ids:
    :param str to_date: End of the search date range. Only returns templates created up to this date/time. If no value is provided, this defaults to the current date.
    :param str used_from_date: Start of the search date range. Only returns templates used or edited on or after this date/time. If no value is specified, there is no limit on the earliest date used.
    :param str used_to_date: End of the search date range. Only returns templates used or edited up to this date/time. If no value is provided, this defaults to the current date.
    :param str user_filter: Sets if the templates shown in the response Valid values are: -owned_by_me: only shows templates the user owns. -shared_with_me: only shows templates that are shared with the user. -all: shows all templates owned or shared with the user.
    :param str user_id:
    :return: EnvelopeTemplateResults
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'count', 'created_from_date', 'created_to_date', 'folder_ids', 'folder_types', 'from_date', 'include', 'is_download', 'modified_from_date', 'modified_to_date', 'order', 'order_by', 'search_fields', 'search_text', 'shared_by_me', 'start_position', 'template_ids', 'to_date', 'used_from_date', 'used_to_date', 'user_filter', 'user_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_templates" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `list_templates`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    # Each optional filter below is forwarded as a query-string parameter
    # only when the caller supplied it.
    query_params = {}
    if 'count' in params:
        query_params['count'] = params['count']
    if 'created_from_date' in params:
        query_params['created_from_date'] = params['created_from_date']
    if 'created_to_date' in params:
        query_params['created_to_date'] = params['created_to_date']
    if 'folder_ids' in params:
        query_params['folder_ids'] = params['folder_ids']
    if 'folder_types' in params:
        query_params['folder_types'] = params['folder_types']
    if 'from_date' in params:
        query_params['from_date'] = params['from_date']
    if 'include' in params:
        query_params['include'] = params['include']
    if 'is_download' in params:
        query_params['is_download'] = params['is_download']
    if 'modified_from_date' in params:
        query_params['modified_from_date'] = params['modified_from_date']
    if 'modified_to_date' in params:
        query_params['modified_to_date'] = params['modified_to_date']
    if 'order' in params:
        query_params['order'] = params['order']
    if 'order_by' in params:
        query_params['order_by'] = params['order_by']
    if 'search_fields' in params:
        query_params['search_fields'] = params['search_fields']
    if 'search_text' in params:
        query_params['search_text'] = params['search_text']
    if 'shared_by_me' in params:
        query_params['shared_by_me'] = params['shared_by_me']
    if 'start_position' in params:
        query_params['start_position'] = params['start_position']
    if 'template_ids' in params:
        query_params['template_ids'] = params['template_ids']
    if 'to_date' in params:
        query_params['to_date'] = params['to_date']
    if 'used_from_date' in params:
        query_params['used_from_date'] = params['used_from_date']
    if 'used_to_date' in params:
        query_params['used_to_date'] = params['used_to_date']
    if 'user_filter' in params:
        query_params['user_filter'] = params['user_filter']
    if 'user_id' in params:
        query_params['user_id'] = params['user_id']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip and response deserialization to
    # the shared API client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='EnvelopeTemplateResults',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def rotate_document_page(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Rotates page image from a template for display.

    Rotates page image from a template for display. The page image can be
    rotated to the left or right. This method makes a synchronous HTTP
    request by default. To make an asynchronous HTTP request, pass a
    `callback` function to be invoked when receiving the response; the
    request thread is then returned instead of the data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.rotate_document_page(account_id, document_id, page_number, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param PageRequest page_request:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized data;
    # the *_with_http_info variant exposes status code and headers too.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing for both modes: None
    # for synchronous calls (this endpoint has no response body), the
    # request thread when a callback was supplied.
    return self.rotate_document_page_with_http_info(account_id, document_id, page_number, template_id, **kwargs)
def rotate_document_page_with_http_info(self, account_id, document_id, page_number, template_id, **kwargs):
    """
    Rotates page image from a template for display.
    Rotates page image from a template for display. The page image can be rotated to the left or right.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.rotate_document_page_with_http_info(account_id, document_id, page_number, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str page_number: The page number being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param PageRequest page_request:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this endpoint accepts; anything else passed
    # through **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'document_id', 'page_number', 'template_id', 'page_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the named arguments, then fold the validated kwargs into the
    # same dict so every parameter can be looked up uniformly by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method rotate_document_page" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `rotate_document_page`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `rotate_document_page`")
    # verify the required parameter 'page_number' is set
    if ('page_number' not in params) or (params['page_number'] is None):
        raise ValueError("Missing the required parameter `page_number` when calling `rotate_document_page`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `rotate_document_page`")
    collection_formats = {}
    # Map the snake_case parameters onto the camelCase placeholders of the
    # REST path template.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/pages/{pageNumber}/page_image'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'page_number' in params:
        path_params['pageNumber'] = params['page_number']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional PageRequest payload (rotation direction) becomes the
    # PUT request body when supplied.
    body_params = None
    if 'page_request' in params:
        body_params = params['page_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    # Delegate the actual HTTP round trip to the shared API client; this
    # endpoint returns no body (response_type=None).
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update(self, account_id, template_id, **kwargs):
    """
    Updates an existing template.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread and have the callback invoked with the
    response instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeTemplate envelope_template:
    :return: TemplateUpdateSummary
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper only surfaces the response body, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: hand the request thread straight back to the caller.
        return self.update_with_http_info(account_id, template_id, **kwargs)
    # Sync path: with _return_http_data_only set, the *_with_http_info
    # variant already returns just the deserialized data.
    return self.update_with_http_info(account_id, template_id, **kwargs)
def update_with_http_info(self, account_id, template_id, **kwargs):
    """
    Updates an existing template.
    Updates an existing template.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeTemplate envelope_template:
    :return: TemplateUpdateSummary
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method understands; anything else in
    # **kwargs is rejected below with a TypeError.
    all_params = ['account_id', 'template_id', 'envelope_template']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot the positional arguments (and the kwargs dict) before any
    # other locals are bound, then fold validated kwargs into it.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update`")
    collection_formats = {}
    # URL template; the {accountId}/{templateId} placeholders are filled
    # in by api_client.call_api from path_params.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the template definition to apply.
    body_params = None
    if 'envelope_template' in params:
        body_params = params['envelope_template']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateUpdateSummary',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_bulk_recipients(self, account_id, recipient_id, template_id, **kwargs):
    """
    Adds or replaces the bulk recipients list in a template.

    Updates the bulk recipients in a template using a file upload. The
    Content-Type supported for uploading a bulk recipient file is CSV
    (text/csv). The REST API does not support modifying individual rows or
    values in the bulk recipients file; it only allows the entire file to
    be added or replaced with a new file.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_bulk_recipients(account_id, recipient_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param BulkRecipientsRequest bulk_recipients_request:
    :return: BulkRecipientsSummaryResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_bulk_recipients_with_http_info(account_id, recipient_id, template_id, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_bulk_recipients_with_http_info(account_id, recipient_id, template_id, **kwargs)
def update_bulk_recipients_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Adds or replaces the bulk recipients list in a template.
    Updates the bulk recipients in a template using a file upload. The Content-Type supported for uploading a bulk recipient file is CSV (text/csv). The REST API does not support modifying individual rows or values in the bulk recipients file. It only allows the entire file to be added or replaced with a new file.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_bulk_recipients_with_http_info(account_id, recipient_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param BulkRecipientsRequest bulk_recipients_request:
    :return: BulkRecipientsSummaryResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'recipient_id', 'template_id', 'bulk_recipients_request']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_bulk_recipients" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_bulk_recipients`")
    # verify the required parameter 'recipient_id' is set
    if ('recipient_id' not in params) or (params['recipient_id'] is None):
        raise ValueError("Missing the required parameter `recipient_id` when calling `update_bulk_recipients`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_bulk_recipients`")
    collection_formats = {}
    # URL template; placeholders are filled by api_client.call_api.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/bulk_recipients'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'recipient_id' in params:
        path_params['recipientId'] = params['recipient_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the replacement bulk recipients list.
    body_params = None
    if 'bulk_recipients_request' in params:
        body_params = params['bulk_recipients_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='BulkRecipientsSummaryResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_custom_fields(self, account_id, template_id, **kwargs):
    """
    Updates envelope custom fields in a template.

    Updates the custom fields in a template. Each custom field used in a
    template must have a unique name.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_custom_fields(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields:
    :return: CustomFields
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_custom_fields_with_http_info(account_id, template_id, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_custom_fields_with_http_info(account_id, template_id, **kwargs)
def update_custom_fields_with_http_info(self, account_id, template_id, **kwargs):
    """
    Updates envelope custom fields in a template.
    Updates the custom fields in a template. Each custom field used in a template must have a unique name.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_custom_fields_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateCustomFields template_custom_fields:
    :return: CustomFields
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'template_id', 'template_custom_fields']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_custom_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_custom_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_custom_fields`")
    collection_formats = {}
    # URL template; placeholders are filled by api_client.call_api.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/custom_fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the custom fields to update.
    body_params = None
    if 'template_custom_fields' in params:
        body_params = params['template_custom_fields']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CustomFields',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_document(self, account_id, document_id, template_id, **kwargs):
    """
    Adds a document to a template document.

    Adds the specified document to an existing template document.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_document(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str is_envelope_definition:
    :param EnvelopeDefinition envelope_definition:
    :return: EnvelopeDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_document_with_http_info(account_id, document_id, template_id, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_document_with_http_info(account_id, document_id, template_id, **kwargs)
def update_document_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Adds a document to a template document.
    Adds the specified document to an existing template document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_document_with_http_info(account_id, document_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str is_envelope_definition:
    :param EnvelopeDefinition envelope_definition:
    :return: EnvelopeDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'document_id', 'template_id', 'is_envelope_definition', 'envelope_definition']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_document" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_document`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `update_document`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_document`")
    collection_formats = {}
    # URL template; placeholders are filled by api_client.call_api.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    # Only this endpoint in the group takes a query parameter.
    query_params = {}
    if 'is_envelope_definition' in params:
        query_params['is_envelope_definition'] = params['is_envelope_definition']
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the document/envelope definition to add.
    body_params = None
    if 'envelope_definition' in params:
        body_params = params['envelope_definition']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='EnvelopeDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_document_fields(self, account_id, document_id, template_id, **kwargs):
    """
    Updates existing custom document fields in an existing template document.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_document_fields(account_id, document_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information:
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_document_fields_with_http_info(account_id, document_id, template_id, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_document_fields_with_http_info(account_id, document_id, template_id, **kwargs)
def update_document_fields_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Updates existing custom document fields in an existing template document.
    Updates existing custom document fields in an existing template document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_document_fields_with_http_info(account_id, document_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param DocumentFieldsInformation document_fields_information:
    :return: DocumentFieldsInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'document_id', 'template_id', 'document_fields_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_document_fields" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_document_fields`")
    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `update_document_fields`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_document_fields`")
    collection_formats = {}
    # URL template; placeholders are filled by api_client.call_api.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/fields'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the document fields to update.
    body_params = None
    if 'document_fields_information' in params:
        body_params = params['document_fields_information']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DocumentFieldsInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_documents(self, account_id, template_id, **kwargs):
    """
    Adds documents to a template document.

    Adds one or more documents to an existing template document.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_documents(account_id, template_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeDefinition envelope_definition:
    :return: TemplateDocumentsResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_documents_with_http_info(account_id, template_id, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_documents_with_http_info(account_id, template_id, **kwargs)
def update_documents_with_http_info(self, account_id, template_id, **kwargs):
    """
    Adds documents to a template document.
    Adds one or more documents to an existing template document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_documents_with_http_info(account_id, template_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param EnvelopeDefinition envelope_definition:
    :return: TemplateDocumentsResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'template_id', 'envelope_definition']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_documents" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_documents`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_documents`")
    collection_formats = {}
    # URL template; placeholders are filled by api_client.call_api.
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the envelope definition listing the documents.
    body_params = None
    if 'envelope_definition' in params:
        body_params = params['envelope_definition']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TemplateDocumentsResult',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_group_share(self, account_id, template_id, template_part, **kwargs):
    """
    Shares a template with a group

    Shares a template with the specified members group.

    Synchronous by default; pass a `callback` keyword argument to run the
    request on a separate thread.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_group_share(account_id, template_id, template_part, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str template_part: Currently, the only defined part is **groups**. (required)
    :param GroupInformation group_information:
    :return: GroupInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Surface only the response body from the lower-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: return the request thread.
        return self.update_group_share_with_http_info(account_id, template_id, template_part, **kwargs)
    # Sync path: already just the deserialized data.
    return self.update_group_share_with_http_info(account_id, template_id, template_part, **kwargs)
def update_group_share_with_http_info(self, account_id, template_id, template_part, **kwargs):
    """
    Shares a template with a group
    Shares a template with the specified members group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_group_share_with_http_info(account_id, template_id, template_part, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str template_part: Currently, the only defined part is **groups**. (required)
    :param GroupInformation group_information:
    :return: GroupInformation
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accepted keyword arguments; anything else raises TypeError below.
    all_params = ['account_id', 'template_id', 'template_part', 'group_information']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot positional args + kwargs before other locals are bound.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_group_share" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'account_id' is set
    if ('account_id' not in params) or (params['account_id'] is None):
        raise ValueError("Missing the required parameter `account_id` when calling `update_group_share`")
    # verify the required parameter 'template_id' is set
    if ('template_id' not in params) or (params['template_id'] is None):
        raise ValueError("Missing the required parameter `template_id` when calling `update_group_share`")
    # verify the required parameter 'template_part' is set
    if ('template_part' not in params) or (params['template_part'] is None):
        raise ValueError("Missing the required parameter `template_part` when calling `update_group_share`")
    collection_formats = {}
    # URL template; {templatePart} selects which part of the template to
    # share (currently only 'groups' is defined by the API).
    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/{templatePart}'.replace('{format}', 'json')
    path_params = {}
    if 'account_id' in params:
        path_params['accountId'] = params['account_id']
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    if 'template_part' in params:
        path_params['templatePart'] = params['template_part']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body: the group(s) to share the template with.
    body_params = None
    if 'group_information' in params:
        body_params = params['group_information']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='GroupInformation',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_lock(self, account_id, template_id, **kwargs):
    """
    Updates a template lock.

    Updates the lock duration time or update the `lockedByApp` property
    information for the specified template. The user and integrator key must
    match the user specified by the `lockByUser` property and integrator key
    information and the `X-DocuSign-Edit` header must be included or an error
    will be generated.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request:
    :return: LockInformation, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    # With a callback the *_with_http_info variant returns the request
    # thread; without one it returns the data. Either way, pass it through.
    return self.update_lock_with_http_info(account_id, template_id, **kwargs)
def update_lock_with_http_info(self, account_id, template_id, **kwargs):
    """
    Updates a template lock.

    Updates the lock duration time or update the `lockedByApp` property
    information for the specified template. The user and integrator key must
    match the user specified by the `lockByUser` property and integrator key
    information and the `X-DocuSign-Edit` header must be included or an error
    will be generated.

    Low-level variant of `update_lock`; same contract, but exposes the
    `call_api` plumbing (async thread, raw-response options).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param LockRequest lock_request:
    :return: LockInformation, or the request thread when called asynchronously.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = (
        'account_id', 'template_id', 'lock_request',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_lock" % name
            )
    # Both path parameters are mandatory and must not be None.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `update_lock`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `update_lock`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/lock'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    # Optional request body; absent -> no body is sent.
    body_params = kwargs.get('lock_request')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        {},                      # query_params: none for this endpoint
        header_params,
        body=body_params,
        post_params=[],          # no form fields
        files={},                # no file uploads
        response_type='LockInformation',
        auth_settings=[],        # no endpoint-specific auth
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={},
    )
def update_notification_settings(self, account_id, template_id, **kwargs):
    """
    Updates the notification structure for an existing template.

    Use this endpoint to set reminder and expiration notifications.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateNotificationRequest template_notification_request:
    :return: Notification, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns the thread (async) or the data
    # (sync); both are passed straight through to the caller.
    return self.update_notification_settings_with_http_info(account_id, template_id, **kwargs)
def update_notification_settings_with_http_info(self, account_id, template_id, **kwargs):
    """
    Updates the notification structure for an existing template.

    Use this endpoint to set reminder and expiration notifications.

    Low-level variant of `update_notification_settings`; same contract, but
    exposes the `call_api` plumbing (async thread, raw-response options).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateNotificationRequest template_notification_request:
    :return: Notification, or the request thread when called asynchronously.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = (
        'account_id', 'template_id', 'template_notification_request',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_notification_settings" % name
            )
    # Both path parameters are mandatory and must not be None.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `update_notification_settings`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `update_notification_settings`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/notification'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    # Optional request body; absent -> no body is sent.
    body_params = kwargs.get('template_notification_request')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        {},                      # query_params: none for this endpoint
        header_params,
        body=body_params,
        post_params=[],          # no form fields
        files={},                # no file uploads
        response_type='Notification',
        auth_settings=[],        # no endpoint-specific auth
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={},
    )
def update_recipients(self, account_id, template_id, **kwargs):
    """
    Updates recipients in a template.

    You can edit the following properties: `email`, `userName`,
    `routingOrder`, `faxNumber`, `deliveryMethod`, `accessCode`, and
    `requireIdLookup`.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str resend_envelope:
    :param TemplateRecipients template_recipients:
    :return: RecipientsUpdateSummary, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns the thread (async) or the data
    # (sync); both are passed straight through to the caller.
    return self.update_recipients_with_http_info(account_id, template_id, **kwargs)
def update_recipients_with_http_info(self, account_id, template_id, **kwargs):
    """
    Updates recipients in a template.

    You can edit the following properties: `email`, `userName`,
    `routingOrder`, `faxNumber`, `deliveryMethod`, `accessCode`, and
    `requireIdLookup`.

    Low-level variant of `update_recipients`; same contract, but exposes the
    `call_api` plumbing (async thread, raw-response options).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param str resend_envelope:
    :param TemplateRecipients template_recipients:
    :return: RecipientsUpdateSummary, or the request thread when called asynchronously.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = (
        'account_id', 'template_id', 'resend_envelope', 'template_recipients',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_recipients" % name
            )
    # Both path parameters are mandatory and must not be None.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `update_recipients`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `update_recipients`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'templateId': template_id,
    }
    # The query string carries `resend_envelope` only when the caller
    # supplied it (membership, not truthiness, to match wire behavior).
    query_params = {}
    if 'resend_envelope' in kwargs:
        query_params['resend_envelope'] = kwargs['resend_envelope']
    # Optional request body; absent -> no body is sent.
    body_params = kwargs.get('template_recipients')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=[],          # no form fields
        files={},                # no file uploads
        response_type='RecipientsUpdateSummary',
        auth_settings=[],        # no endpoint-specific auth
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={},
    )
def update_tabs(self, account_id, recipient_id, template_id, **kwargs):
    """
    Updates the tabs for a recipient.

    Updates one or more tabs for a recipient in a template.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns the thread (async) or the data
    # (sync); both are passed straight through to the caller.
    return self.update_tabs_with_http_info(account_id, recipient_id, template_id, **kwargs)
def update_tabs_with_http_info(self, account_id, recipient_id, template_id, **kwargs):
    """
    Updates the tabs for a recipient.

    Updates one or more tabs for a recipient in a template.

    Low-level variant of `update_tabs`; same contract, but exposes the
    `call_api` plumbing (async thread, raw-response options).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str recipient_id: The ID of the recipient being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs, or the request thread when called asynchronously.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = (
        'account_id', 'recipient_id', 'template_id', 'template_tabs',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_tabs" % name
            )
    # All three path parameters are mandatory and must not be None.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `update_tabs`")
    if recipient_id is None:
        raise ValueError("Missing the required parameter `recipient_id` when calling `update_tabs`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `update_tabs`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/recipients/{recipientId}/tabs'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'recipientId': recipient_id,
        'templateId': template_id,
    }
    # Optional request body; absent -> no body is sent.
    body_params = kwargs.get('template_tabs')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        {},                      # query_params: none for this endpoint
        header_params,
        body=body_params,
        post_params=[],          # no form fields
        files={},                # no file uploads
        response_type='Tabs',
        auth_settings=[],        # no endpoint-specific auth
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={},
    )
def update_template_document_tabs(self, account_id, document_id, template_id, **kwargs):
    """
    Updates the tabs for a template.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and the
    callback is invoked with the response.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs, or the request thread when called asynchronously.
    """
    # Only the deserialized payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns the thread (async) or the data
    # (sync); both are passed straight through to the caller.
    return self.update_template_document_tabs_with_http_info(account_id, document_id, template_id, **kwargs)
def update_template_document_tabs_with_http_info(self, account_id, document_id, template_id, **kwargs):
    """
    Updates the tabs for a template.

    Low-level variant of `update_template_document_tabs`; same contract, but
    exposes the `call_api` plumbing (async thread, raw-response options).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str account_id: The external account number (int) or account ID Guid. (required)
    :param str document_id: The ID of the document being accessed. (required)
    :param str template_id: The ID of the template being accessed. (required)
    :param TemplateTabs template_tabs:
    :return: Tabs, or the request thread when called asynchronously.
    """
    # Reject any keyword argument this endpoint does not recognize.
    recognized = (
        'account_id', 'document_id', 'template_id', 'template_tabs',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    )
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_template_document_tabs" % name
            )
    # All three path parameters are mandatory and must not be None.
    if account_id is None:
        raise ValueError("Missing the required parameter `account_id` when calling `update_template_document_tabs`")
    if document_id is None:
        raise ValueError("Missing the required parameter `document_id` when calling `update_template_document_tabs`")
    if template_id is None:
        raise ValueError("Missing the required parameter `template_id` when calling `update_template_document_tabs`")

    resource_path = '/v2.1/accounts/{accountId}/templates/{templateId}/documents/{documentId}/tabs'.replace('{format}', 'json')
    path_params = {
        'accountId': account_id,
        'documentId': document_id,
        'templateId': template_id,
    }
    # Optional request body; absent -> no body is sent.
    body_params = kwargs.get('template_tabs')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        {},                      # query_params: none for this endpoint
        header_params,
        body=body_params,
        post_params=[],          # no form fields
        files={},                # no file uploads
        response_type='Tabs',
        auth_settings=[],        # no endpoint-specific auth
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={},
    )
| 50.218376
| 2,180
| 0.608115
| 34,396
| 314,819
| 5.337597
| 0.014595
| 0.043335
| 0.021047
| 0.020393
| 0.987559
| 0.984863
| 0.982254
| 0.980745
| 0.979612
| 0.977423
| 0
| 0.000587
| 0.313329
| 314,819
| 6,268
| 2,181
| 50.226388
| 0.84868
| 0.364444
| 0
| 0.82161
| 0
| 0.001267
| 0.227177
| 0.059354
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03327
| false
| 0
| 0.002218
| 0
| 0.085234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
041107094dc81c6940c5a007403bdb70a7e7bddf
| 5,137
|
py
|
Python
|
tests/test_implicit_flow.py
|
yaal-fr/canaille
|
828d190adea7bc6e34d59bac42cbc1283509880b
|
[
"MIT"
] | 3
|
2020-11-03T14:44:53.000Z
|
2021-09-26T16:49:01.000Z
|
tests/test_implicit_flow.py
|
yaal-fr/canaille
|
828d190adea7bc6e34d59bac42cbc1283509880b
|
[
"MIT"
] | null | null | null |
tests/test_implicit_flow.py
|
yaal-fr/canaille
|
828d190adea7bc6e34d59bac42cbc1283509880b
|
[
"MIT"
] | null | null | null |
from authlib.jose import jwt
from urllib.parse import urlsplit, parse_qs
from canaille.models import Token
def test_oauth_implicit(testclient, slapd_connection, user, client):
"""OAuth2 implicit flow: the access token arrives in the redirect fragment and grants userinfo access."""
# Switch the client to the implicit grant with no token-endpoint auth.
client.oauthGrantType = ["token"]
client.oauthTokenEndpointAuthMethod = "none"
client.save(slapd_connection)
# Start the authorization request for an access token.
res = testclient.get(
"/oauth/authorize",
params=dict(
response_type="token",
client_id=client.oauthClientID,
scope="profile",
nonce="somenonce",
),
)
assert "text/html" == res.content_type
# Log in through the rendered form, then follow the redirect.
res.form["login"] = "user"
res.form["password"] = "correct horse battery staple"
res = res.form.submit(status=302)
res = res.follow()
assert "text/html" == res.content_type, res.json
# Accept the consent screen; the server redirects back to the client.
res = res.form.submit(name="answer", value="accept", status=302)
assert res.location.startswith(client.oauthRedirectURIs[0])
# Implicit flow puts the token in the URL *fragment*, not the query string.
params = parse_qs(urlsplit(res.location).fragment)
access_token = params["access_token"][0]
# The token must have been persisted server-side.
token = Token.get(access_token, conn=slapd_connection)
assert token is not None
# The bearer token grants access to the userinfo endpoint.
res = testclient.get(
"/oauth/userinfo", headers={"Authorization": f"Bearer {access_token}"}
)
assert "application/json" == res.content_type
assert {
"name": "John Doe",
"sub": "user",
"family_name": "Doe",
"groups": [],
} == res.json
# Restore the client configuration for subsequent tests.
client.oauthGrantType = ["code"]
client.oauthTokenEndpointAuthMethod = "client_secret_basic"
client.save(slapd_connection)
def test_oidc_implicit(
testclient, keypair, slapd_connection, user, client, other_client
):
"""OIDC implicit flow: both an access token and a verifiable id_token are returned in the fragment."""
# Switch the client to the implicit grant (token + id_token), no auth method.
client.oauthGrantType = ["token id_token"]
client.oauthTokenEndpointAuthMethod = "none"
client.save(slapd_connection)
# Request both an id_token and an access token with the openid scope.
res = testclient.get(
"/oauth/authorize",
params=dict(
response_type="id_token token",
client_id=client.oauthClientID,
scope="openid profile",
nonce="somenonce",
),
)
assert "text/html" == res.content_type
# Log in through the rendered form, then follow the redirect.
res.form["login"] = "user"
res.form["password"] = "correct horse battery staple"
res = res.form.submit(status=302)
res = res.follow(status=200)
assert "text/html" == res.content_type, res.json
# Accept the consent screen; the server redirects back to the client.
res = res.form.submit(name="answer", value="accept", status=302)
assert res.location.startswith(client.oauthRedirectURIs[0])
# Tokens are carried in the URL fragment.
params = parse_qs(urlsplit(res.location).fragment)
access_token = params["access_token"][0]
token = Token.get(access_token, conn=slapd_connection)
assert token is not None
# The id_token is a JWT signed with the server key; verify its claims.
id_token = params["id_token"][0]
claims = jwt.decode(id_token, keypair[1])
assert user.uid[0] == claims["sub"]
assert user.cn[0] == claims["name"]
# The audience covers every registered client.
assert [client.oauthClientID, other_client.oauthClientID] == claims["aud"]
# The bearer token grants access to the userinfo endpoint.
res = testclient.get(
"/oauth/userinfo",
headers={"Authorization": f"Bearer {access_token}"},
status=200,
)
assert "application/json" == res.content_type
assert {
"name": "John Doe",
"sub": "user",
"family_name": "Doe",
"groups": [],
} == res.json
# Restore the client configuration for subsequent tests.
client.oauthGrantType = ["code"]
client.oauthTokenEndpointAuthMethod = "client_secret_basic"
client.save(slapd_connection)
def test_oidc_implicit_with_group(
testclient, keypair, slapd_connection, user, client, foo_group, other_client
):
"""Same OIDC implicit flow, but with the `groups` scope: group membership shows up in the id_token and userinfo."""
# Switch the client to the implicit grant (token + id_token), no auth method.
client.oauthGrantType = ["token id_token"]
client.oauthTokenEndpointAuthMethod = "none"
client.save(slapd_connection)
# Request an id_token and access token, additionally asking for groups.
res = testclient.get(
"/oauth/authorize",
params=dict(
response_type="id_token token",
client_id=client.oauthClientID,
scope="openid profile groups",
nonce="somenonce",
),
)
assert "text/html" == res.content_type
# Log in through the rendered form, then follow the redirect.
res.form["login"] = "user"
res.form["password"] = "correct horse battery staple"
res = res.form.submit(status=302)
res = res.follow(status=200)
assert "text/html" == res.content_type, res.json
# Accept the consent screen; the server redirects back to the client.
res = res.form.submit(name="answer", value="accept", status=302)
assert res.location.startswith(client.oauthRedirectURIs[0])
# Tokens are carried in the URL fragment.
params = parse_qs(urlsplit(res.location).fragment)
access_token = params["access_token"][0]
token = Token.get(access_token, conn=slapd_connection)
assert token is not None
# Verify the id_token claims, including the group membership claim.
id_token = params["id_token"][0]
claims = jwt.decode(id_token, keypair[1])
assert user.uid[0] == claims["sub"]
assert user.cn[0] == claims["name"]
assert [client.oauthClientID, other_client.oauthClientID] == claims["aud"]
# The `foo_group` fixture put the user in the "foo" group.
assert ["foo"] == claims["groups"]
# Userinfo must mirror the same group membership.
res = testclient.get(
"/oauth/userinfo",
headers={"Authorization": f"Bearer {access_token}"},
status=200,
)
assert "application/json" == res.content_type
assert {
"name": "John Doe",
"sub": "user",
"family_name": "Doe",
"groups": ["foo"],
} == res.json
# Restore the client configuration for subsequent tests.
client.oauthGrantType = ["code"]
client.oauthTokenEndpointAuthMethod = "client_secret_basic"
client.save(slapd_connection)
| 29.866279
| 80
| 0.642593
| 582
| 5,137
| 5.542955
| 0.16323
| 0.055797
| 0.039058
| 0.046497
| 0.934594
| 0.934594
| 0.897086
| 0.897086
| 0.897086
| 0.897086
| 0
| 0.010997
| 0.221141
| 5,137
| 171
| 81
| 30.040936
| 0.795301
| 0
| 0
| 0.830882
| 0
| 0
| 0.17228
| 0
| 0
| 0
| 0
| 0
| 0.183824
| 1
| 0.022059
| false
| 0.022059
| 0.022059
| 0
| 0.044118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0434419c384431baf765e6523ecd6c449dd0a84d
| 146
|
py
|
Python
|
playwell-rpa/playwell_rpa/launcher.py
|
jiyulongxu/playwell
|
3a5dc4d009c6fd75487e208edf0318db4f9ad21d
|
[
"Apache-2.0"
] | 4
|
2019-09-01T02:05:09.000Z
|
2022-01-04T06:08:14.000Z
|
playwell-rpa/playwell_rpa/launcher.py
|
jiyulongxu/playwell
|
3a5dc4d009c6fd75487e208edf0318db4f9ad21d
|
[
"Apache-2.0"
] | null | null | null |
playwell-rpa/playwell_rpa/launcher.py
|
jiyulongxu/playwell
|
3a5dc4d009c6fd75487e208edf0318db4f9ad21d
|
[
"Apache-2.0"
] | 6
|
2019-11-14T13:55:17.000Z
|
2022-02-09T01:42:24.000Z
|
"""RPA launcher
"""
def launch():
"""Initialize the RPA runtime before use."""
# Load the web driver (import deferred so the browser stack is only pulled in on launch).
from playwell_rpa.browser import init_web_driver_manager
init_web_driver_manager()
| 16.222222
| 60
| 0.726027
| 19
| 146
| 5.210526
| 0.684211
| 0.141414
| 0.262626
| 0.40404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184932
| 146
| 8
| 61
| 18.25
| 0.831933
| 0.178082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
044acb6344b74cf28d7bf66314df9a4b6f6cf08d
| 2,281
|
py
|
Python
|
vote/migrations/0008_auto_20210309_0029.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | 2
|
2021-02-24T21:57:50.000Z
|
2021-03-15T08:44:09.000Z
|
vote/migrations/0008_auto_20210309_0029.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | null | null | null |
vote/migrations/0008_auto_20210309_0029.py
|
jnegrete2005/JuradoFMS
|
25848037e51de1781c419155615d0fb41edc07ec
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.5 on 2021-03-09 05:29
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
# Auto-generated Django migration: resizes/retypes the PostgreSQL ArrayField
# score columns on the `competitor` model (each holds PositiveSmallIntegerField
# scores; `size` fixes the maximum number of entries per round type).
dependencies = [
('vote', '0007_auto_20210309_0028'),
]
operations = [
# `deluxe` round: up to 14 scores.
migrations.AlterField(
model_name='competitor',
name='deluxe',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=14),
),
# `easy` round: up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='easy',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9),
),
# `hard` round: up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='hard',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9),
),
# First minute round ("minuto 1"): up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='min1',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 1'),
),
# Second minute round ("minuto 2"): up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='min2',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9, verbose_name='minuto 2'),
),
# Random-topic round: up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='random_score',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9),
),
# Replica (rebuttal) round: up to 9 scores.
migrations.AlterField(
model_name='competitor',
name='replica',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=9),
),
# Tematicas (themed) round: up to 7 scores.
migrations.AlterField(
model_name='competitor',
name='tematicas',
field=django.contrib.postgres.fields.ArrayField(base_field=models.PositiveSmallIntegerField(), blank=True, null=True, size=7),
),
]
| 41.472727
| 163
| 0.641824
| 227
| 2,281
| 6.352423
| 0.246696
| 0.081137
| 0.131068
| 0.168516
| 0.837725
| 0.837725
| 0.718447
| 0.718447
| 0.718447
| 0.718447
| 0
| 0.025273
| 0.236738
| 2,281
| 54
| 164
| 42.240741
| 0.802987
| 0.019728
| 0
| 0.583333
| 1
| 0
| 0.07744
| 0.010295
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9bd84e19a2b91b999343f716ce8aaafaaa910b1b
| 147
|
py
|
Python
|
backend/benefit/messages/tests/conftest.py
|
City-of-Helsinki/kesaseteli
|
964f801c2dba72c4105b6e436b12b821b199d6d2
|
[
"MIT"
] | 2
|
2021-05-10T09:28:35.000Z
|
2021-05-17T12:15:34.000Z
|
backend/benefit/messages/tests/conftest.py
|
City-of-Helsinki/yjdh
|
1c07576b456d2be9c3171363450ed46de2c1bbcb
|
[
"MIT"
] | 931
|
2021-05-21T15:24:35.000Z
|
2022-03-31T20:07:40.000Z
|
backend/benefit/messages/tests/conftest.py
|
City-of-Helsinki/yjdh
|
1c07576b456d2be9c3171363450ed46de2c1bbcb
|
[
"MIT"
] | 6
|
2021-07-06T11:07:02.000Z
|
2022-02-07T12:42:21.000Z
|
from applications.tests.conftest import * # noqa
from common.tests.conftest import * # noqa
from helsinkibenefit.tests.conftest import * # noqa
| 36.75
| 52
| 0.77551
| 18
| 147
| 6.333333
| 0.444444
| 0.342105
| 0.5
| 0.605263
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 147
| 3
| 53
| 49
| 0.904762
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9bd9b6c6006ee95050fb456789b696ace22b50c4
| 272
|
py
|
Python
|
modAL/__init__.py
|
nibydlo/modAL
|
c0fe0200001c8c34e3fabb099fb70cf1e4bfb680
|
[
"MIT"
] | 2
|
2020-01-22T14:34:01.000Z
|
2020-01-22T14:51:18.000Z
|
modAL/__init__.py
|
nibydlo/modAL
|
c0fe0200001c8c34e3fabb099fb70cf1e4bfb680
|
[
"MIT"
] | null | null | null |
modAL/__init__.py
|
nibydlo/modAL
|
c0fe0200001c8c34e3fabb099fb70cf1e4bfb680
|
[
"MIT"
] | null | null | null |
from .models import ActiveLearner, Committee, CommitteeRegressor, KerasActiveLearner, DropoutActiveLearner, LearningLossActiveLearner
__all__ = ['ActiveLearner', 'Committee', 'CommitteeRegressor', 'KerasActiveLearner', 'DropoutActiveLearner', 'LearningLossActiveLearner']
| 90.666667
| 137
| 0.841912
| 16
| 272
| 14.0625
| 0.625
| 0.195556
| 0.355556
| 0.515556
| 0.915556
| 0.915556
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 272
| 3
| 137
| 90.666667
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0.377289
| 0.091575
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
5002e8b05ba70b737b8dae488a2b8b56ba7e0299
| 168,195
|
py
|
Python
|
imcsdk/imcmeta.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/imcmeta.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/imcmeta.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | 3
|
2018-11-14T13:02:40.000Z
|
2018-11-14T13:49:38.000Z
|
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is an auto-generated module containing ManagedObject Meta information. """
from .imccoremeta import ImcVersion
from .imccoremeta import MoMeta
class VersionMeta:
""" This class contains all the ImcVersion supported by this package."""
Version151f = ImcVersion("1.5(1f)")
Version151x = ImcVersion("1.5(1x)")
Version152 = ImcVersion("152")
Version153 = ImcVersion("153")
Version154 = ImcVersion("154")
Version2010b = ImcVersion("2.0(10b)")
Version2013e = ImcVersion("2.0(13e)")
Version201a = ImcVersion("2.0(1a)")
Version202c = ImcVersion("2.0(2c)")
Version203d = ImcVersion("2.0(3d)")
Version204c = ImcVersion("2.0(4c)")
Version208d = ImcVersion("2.0(8d)")
Version209c = ImcVersion("2.0(9c)")
Version301c = ImcVersion("3.0(1c)")
Version302b = ImcVersion("3.0(2b)")
Version303a = ImcVersion("3.0(3a)")
Version311d = ImcVersion("3.1(1d)")
MO_CLASS_ID = frozenset([
"AaaLdap",
"AaaLdapRoleGroup",
"AaaSession",
"AaaUser",
"AaaUserEp",
"AaaUserPasswordExpiration",
"AaaUserPolicy",
"ActivatePIDCatalog",
"AdaptorCfgBackup",
"AdaptorCfgImporter",
"AdaptorConnectorInfo",
"AdaptorEthCompQueueProfile",
"AdaptorEthGenProfile",
"AdaptorEthISCSIProfile",
"AdaptorEthInterruptProfile",
"AdaptorEthOffloadProfile",
"AdaptorEthRdmaProfile",
"AdaptorEthRecvQueueProfile",
"AdaptorEthUSNICProfile",
"AdaptorEthWorkQueueProfile",
"AdaptorExtEthIf",
"AdaptorExtIpV6RssHashProfile",
"AdaptorFcBootTable",
"AdaptorFcCdbWorkQueueProfile",
"AdaptorFcErrorRecoveryProfile",
"AdaptorFcGenProfile",
"AdaptorFcInterruptProfile",
"AdaptorFcPersistentBindings",
"AdaptorFcPortFLogiProfile",
"AdaptorFcPortPLogiProfile",
"AdaptorFcPortProfile",
"AdaptorFcRecvQueueProfile",
"AdaptorFcWorkQueueProfile",
"AdaptorGenProfile",
"AdaptorHostEthIf",
"AdaptorHostFcIf",
"AdaptorIpV4RssHashProfile",
"AdaptorIpV6RssHashProfile",
"AdaptorLinkTraining",
"AdaptorPortProfiles",
"AdaptorRssProfile",
"AdaptorUnit",
"AdvancedPowerProfile",
"BiosBOT",
"BiosBootDev",
"BiosBootDevGrp",
"BiosBootDevPrecision",
"BiosBootMode",
"BiosPassword",
"BiosPlatformDefaults",
"BiosProfile",
"BiosProfileManagement",
"BiosProfileToken",
"BiosSettings",
"BiosUnit",
"BiosVfASPMSupport",
"BiosVfAdjacentCacheLinePrefetch",
"BiosVfAltitude",
"BiosVfAssertNMIOnPERR",
"BiosVfAssertNMIOnSERR",
"BiosVfAutonumousCstateEnable",
"BiosVfBootOptionNumRetry",
"BiosVfBootOptionReCoolDown",
"BiosVfBootOptionRetry",
"BiosVfBootPerformanceMode",
"BiosVfCDNEnable",
"BiosVfCDNSupport",
"BiosVfCPUEnergyPerformance",
"BiosVfCPUFrequencyFloor",
"BiosVfCPUPerformance",
"BiosVfCPUPowerManagement",
"BiosVfCkeLowPolicy",
"BiosVfClosedLoopThermThrotl",
"BiosVfCmciEnable",
"BiosVfConfigTDP",
"BiosVfConsoleRedirection",
"BiosVfCoreMultiProcessing",
"BiosVfDCUPrefetch",
"BiosVfDRAMClockThrottling",
"BiosVfDemandScrub",
"BiosVfDirectCacheAccess",
"BiosVfDramRefreshRate",
"BiosVfEngPerfTuning",
"BiosVfEnhancedIntelSpeedStepTech",
"BiosVfExecuteDisableBit",
"BiosVfExtendedAPIC",
"BiosVfFRB2Enable",
"BiosVfHWPMEnable",
"BiosVfHardwarePrefetch",
"BiosVfIMCInterleave",
"BiosVfIOHResource",
"BiosVfIPV6PXE",
"BiosVfIntelHyperThreadingTech",
"BiosVfIntelTurboBoostTech",
"BiosVfIntelVTForDirectedIO",
"BiosVfIntelVirtualizationTechnology",
"BiosVfIohErrorEn",
"BiosVfKTIPrefetch",
"BiosVfLLCPrefetch",
"BiosVfLOMPortOptionROM",
"BiosVfLegacyUSBSupport",
"BiosVfLvDIMMSupport",
"BiosVfMMCFGBase",
"BiosVfMemoryInterleave",
"BiosVfMemoryMappedIOAbove4GB",
"BiosVfMirroringMode",
"BiosVfNUMAOptimized",
"BiosVfOSBootWatchdogTimer",
"BiosVfOSBootWatchdogTimerPolicy",
"BiosVfOSBootWatchdogTimerTimeout",
"BiosVfOnboardNIC",
"BiosVfOnboardStorage",
"BiosVfOnboardStorageSWStack",
"BiosVfOutOfBandMgmtPort",
"BiosVfPCIOptionROMs",
"BiosVfPCISlotOptionROMEnable",
"BiosVfPCIeSSDHotPlugSupport",
"BiosVfPOSTErrorPause",
"BiosVfPSata",
"BiosVfPStateCoordType",
"BiosVfPackageCStateLimit",
"BiosVfPatrolScrub",
"BiosVfPatrolScrubDuration",
"BiosVfPchUsb30Mode",
"BiosVfPciRomClp",
"BiosVfPowerOnPasswordSupport",
"BiosVfProcessorC1E",
"BiosVfProcessorC3Report",
"BiosVfProcessorC6Report",
"BiosVfProcessorCState",
"BiosVfPwrPerfTuning",
"BiosVfQPIConfig",
"BiosVfQpiSnoopMode",
"BiosVfResumeOnACPowerLoss",
"BiosVfSataModeSelect",
"BiosVfSelectMemoryRASConfiguration",
"BiosVfSerialPortAEnable",
"BiosVfSinglePCTLEnable",
"BiosVfSparingMode",
"BiosVfSrIov",
"BiosVfSubNumaClustering",
"BiosVfTPMControl",
"BiosVfTPMSupport",
"BiosVfUCSMBootOrderRuleControl",
"BiosVfUSBBootConfig",
"BiosVfUSBEmulation",
"BiosVfUSBPortsConfig",
"BiosVfUsbXhciSupport",
"BiosVfVgaPriority",
"BiosVfWorkLoadConfig",
"BiosVfXPTPrefetch",
"CertificateManagement",
"CommHttp",
"CommHttps",
"CommIpmiLan",
"CommKvm",
"CommMailAlert",
"CommNtpProvider",
"CommRedfish",
"CommSavedVMediaMap",
"CommSnmp",
"CommSnmpTrap",
"CommSnmpUser",
"CommSsh",
"CommSvcEp",
"CommSyslog",
"CommSyslogClient",
"CommVMedia",
"CommVMediaMap",
"ComputeBoard",
"ComputeMbPowerStats",
"ComputeRackUnit",
"ComputeRackUnitMbTempStats",
"CurrentCertificate",
"DownloadClientCertificate",
"DownloadClientPrivateKey",
"DownloadLdapCACertificate",
"DownloadRootCACertificate",
"EquipmentFan",
"EquipmentFanModule",
"EquipmentIndicatorLed",
"EquipmentLocatorLed",
"EquipmentPsu",
"EquipmentPsuColdRedundancy",
"EquipmentPsuFan",
"EquipmentTpm",
"Error",
"EventManagement",
"ExportClientCertificate",
"ExportClientPrivateKey",
"ExportLdapCACertificate",
"ExportRootCACertificate",
"FanPolicy",
"FaultInst",
"FirmwareBootDefinition",
"FirmwareBootUnit",
"FirmwareRunning",
"FirmwareUpdatable",
"GenerateCertificateSigningRequest",
"GenerateRandomPassword",
"GeneratedStorageControllerKeyId",
"GpuInventory",
"HuuController",
"HuuFirmwareCatalog",
"HuuFirmwareCatalogComponent",
"HuuFirmwareComponent",
"HuuFirmwareRunning",
"HuuFirmwareUpdateCancel",
"HuuFirmwareUpdateStatus",
"HuuFirmwareUpdater",
"HuuUpdateComponentStatus",
"IodController",
"IodSnapshotCancel",
"IodSnapshotStart",
"IodSnapshotStatus",
"IpBlocking",
"IpFiltering",
"KmipManagement",
"KmipServer",
"KmipServerLogin",
"LdapCACertificate",
"LdapCACertificateManagement",
"LsbootBootSecurity",
"LsbootCdd",
"LsbootDef",
"LsbootDevPrecision",
"LsbootEfi",
"LsbootHdd",
"LsbootIscsi",
"LsbootLan",
"LsbootLocalStorage",
"LsbootNVMe",
"LsbootPchStorage",
"LsbootPxe",
"LsbootSan",
"LsbootSd",
"LsbootStorage",
"LsbootUefiShell",
"LsbootUsb",
"LsbootVMedia",
"LsbootVirtualMedia",
"MailRecipient",
"MemoryArray",
"MemoryUnit",
"MemoryUnitEnvStats",
"MgmtBackup",
"MgmtController",
"MgmtIf",
"MgmtImporter",
"MgmtInventory",
"NetworkAdapterEthIf",
"NetworkAdapterUnit",
"OneTimeBootDevice",
"OneTimePrecisionBootDevice",
"OsiCancel",
"OsiController",
"OsiStart",
"OsiStatus",
"PciEquipSlot",
"PidCatalog",
"PidCatalogCpu",
"PidCatalogDimm",
"PidCatalogHdd",
"PidCatalogPCIAdapter",
"PlatformEventFilters",
"PowerBudget",
"PowerMonitor",
"ProcessorEnvStats",
"ProcessorUnit",
"SelfEncryptStorageController",
"ServerUtilization",
"SolIf",
"StandardPowerProfile",
"StorageController",
"StorageControllerHealth",
"StorageControllerNVMe",
"StorageControllerProps",
"StorageControllerSettings",
"StorageFlexFlashController",
"StorageFlexFlashControllerProps",
"StorageFlexFlashOperationalProfile",
"StorageFlexFlashPhysicalDrive",
"StorageFlexFlashVirtualDrive",
"StorageFlexFlashVirtualDriveImageMap",
"StorageFlexUtilController",
"StorageFlexUtilHealth",
"StorageFlexUtilOperationalProfile",
"StorageFlexUtilPhysicalDrive",
"StorageFlexUtilVirtualDrive",
"StorageFlexUtilVirtualDriveImageMap",
"StorageLocalDisk",
"StorageLocalDiskProps",
"StorageLocalDiskSlotEp",
"StorageLocalDiskUsage",
"StorageNVMePhysicalDrive",
"StorageOperation",
"StorageRaidBattery",
"StorageSasExpander",
"StorageUnusedLocalDisk",
"StorageVirtualDrive",
"StorageVirtualDriveCreatorUsingUnusedPhysicalDrive",
"StorageVirtualDriveCreatorUsingVirtualDriveGroup",
"StorageVirtualDriveWithDriveGroupSpace",
"SuggestedStorageControllerSecurityKey",
"SysdebugMEpLog",
"SysdebugTechSupportExport",
"SystemBoardUnit",
"SystemIOController",
"TopRoot",
"TopSystem",
"UploadBiosProfile",
"UploadCertificate",
"UploadPIDCatalog",
"VicBackupAll",
"VicImporterAll",
"X86LiveDebug",
"AutoPowerProfile",
"BmcResetReason",
"ChassisPIDCatalog",
"ChassisPowerBudget",
"ChassisPowerMonitor",
"ChassisPowerUtilization",
"CommEpIpmiLan",
"CommSvcRack",
"ComputeServerNode",
"ComputeServerNodeMbTempStats",
"ComputeServerRef",
"ComputeSharedIOMbPowerStats",
"ComputeSharedIOMbTempStats",
"CustomPowerProfile",
"EquipmentChassis",
"EquipmentChassisLocatorLed",
"EquipmentSharedIOModule",
"EquipmentSystemIOController",
"IoExpander",
"MgmtBackupServer",
"MgmtImporterServer",
"SiocResetReason",
"StorageEnclosure",
"StorageEnclosureDisk",
"StorageEnclosureDiskFwHelper",
"StorageEnclosureDiskSlotEp",
"StorageEnclosureDiskSlotZoneHelper",
"StorageLocalDiskEp",
"StorageSasUplink",
"ThermalPowerProfile",
])
MO_CLASS_META = {
"classic": {
"AaaLdap": MoMeta("AaaLdap", "aaaLdap", "ldap-ext", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'aaaLdapRoleGroup', u'ldapCACertificateManagement'], ["Get", "Set"]),
"AaaLdapRoleGroup": MoMeta("AaaLdapRoleGroup", "aaaLdapRoleGroup", "rolegroup-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'aaaLdap'], [], ["Get", "Set"]),
"AaaSession": MoMeta("AaaSession", "aaaSession", "term-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'aaaUserEp'], [], ["Get"]),
"AaaUser": MoMeta("AaaUser", "aaaUser", "user-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"AaaUserEp": MoMeta("AaaUserEp", "aaaUserEp", "user-ext", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'aaaSession', u'aaaUser', u'aaaUserPasswordExpiration', u'aaaUserPolicy', u'generateRandomPassword'], ["Get"]),
"AaaUserPasswordExpiration": MoMeta("AaaUserPasswordExpiration", "aaaUserPasswordExpiration", "password-expiration", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"AaaUserPolicy": MoMeta("AaaUserPolicy", "aaaUserPolicy", "policy", VersionMeta.Version209c, "InputOutput", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"ActivatePIDCatalog": MoMeta("ActivatePIDCatalog", "activatePIDCatalog", "activate-catalog", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get", "Set"]),
"AdaptorCfgBackup": MoMeta("AdaptorCfgBackup", "adaptorCfgBackup", "export-config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorCfgImporter": MoMeta("AdaptorCfgImporter", "adaptorCfgImporter", "import-config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorConnectorInfo": MoMeta("AdaptorConnectorInfo", "adaptorConnectorInfo", "connector-info", VersionMeta.Version204c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorExtEthIf'], [], ["Get"]),
"AdaptorEthCompQueueProfile": MoMeta("AdaptorEthCompQueueProfile", "adaptorEthCompQueueProfile", "eth-comp-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthGenProfile": MoMeta("AdaptorEthGenProfile", "adaptorEthGenProfile", "general", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthISCSIProfile": MoMeta("AdaptorEthISCSIProfile", "adaptorEthISCSIProfile", "ethiscsi", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Add", "Get", "Remove", "Set"]),
"AdaptorEthInterruptProfile": MoMeta("AdaptorEthInterruptProfile", "adaptorEthInterruptProfile", "eth-int", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthOffloadProfile": MoMeta("AdaptorEthOffloadProfile", "adaptorEthOffloadProfile", "eth-offload", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthRdmaProfile": MoMeta("AdaptorEthRdmaProfile", "adaptorEthRdmaProfile", "rdmaprofile", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthRecvQueueProfile": MoMeta("AdaptorEthRecvQueueProfile", "adaptorEthRecvQueueProfile", "eth-rcv-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthUSNICProfile": MoMeta("AdaptorEthUSNICProfile", "adaptorEthUSNICProfile", "ethusnic", VersionMeta.Version151x, "InputOutput", 0x1, [], ["admin"], [u'adaptorHostEthIf'], [], ["Get", "Remove", "Set"]),
"AdaptorEthWorkQueueProfile": MoMeta("AdaptorEthWorkQueueProfile", "adaptorEthWorkQueueProfile", "eth-work-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorExtEthIf": MoMeta("AdaptorExtEthIf", "adaptorExtEthIf", "ext-eth-[port_id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [u'adaptorConnectorInfo', u'adaptorLinkTraining', u'adaptorPortProfiles'], ["Get", "Set"]),
"AdaptorExtIpV6RssHashProfile": MoMeta("AdaptorExtIpV6RssHashProfile", "adaptorExtIpV6RssHashProfile", "ext-ipv6-rss-hash", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorFcBootTable": MoMeta("AdaptorFcBootTable", "adaptorFcBootTable", "fcboot-[index]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Add", "Get", "Set"]),
"AdaptorFcCdbWorkQueueProfile": MoMeta("AdaptorFcCdbWorkQueueProfile", "adaptorFcCdbWorkQueueProfile", "fc-cdb-work-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcErrorRecoveryProfile": MoMeta("AdaptorFcErrorRecoveryProfile", "adaptorFcErrorRecoveryProfile", "fc-err-rec", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcGenProfile": MoMeta("AdaptorFcGenProfile", "adaptorFcGenProfile", "general", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcInterruptProfile": MoMeta("AdaptorFcInterruptProfile", "adaptorFcInterruptProfile", "fc-int", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPersistentBindings": MoMeta("AdaptorFcPersistentBindings", "adaptorFcPersistentBindings", "perbi-[index]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get"]),
"AdaptorFcPortFLogiProfile": MoMeta("AdaptorFcPortFLogiProfile", "adaptorFcPortFLogiProfile", "fc-port-flogi", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPortPLogiProfile": MoMeta("AdaptorFcPortPLogiProfile", "adaptorFcPortPLogiProfile", "fc-port-plogi", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPortProfile": MoMeta("AdaptorFcPortProfile", "adaptorFcPortProfile", "fc-port", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcRecvQueueProfile": MoMeta("AdaptorFcRecvQueueProfile", "adaptorFcRecvQueueProfile", "fc-rcv-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcWorkQueueProfile": MoMeta("AdaptorFcWorkQueueProfile", "adaptorFcWorkQueueProfile", "fc-work-q", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorGenProfile": MoMeta("AdaptorGenProfile", "adaptorGenProfile", "general", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorHostEthIf": MoMeta("AdaptorHostEthIf", "adaptorHostEthIf", "host-eth-[name]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Add", "Get", "Remove", "Set"]),
"AdaptorHostFcIf": MoMeta("AdaptorHostFcIf", "adaptorHostFcIf", "host-fc-[name]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [u'adaptorFcPersistentBindings'], ["Add", "Get", "Remove", "Set"]),
"AdaptorIpV4RssHashProfile": MoMeta("AdaptorIpV4RssHashProfile", "adaptorIpV4RssHashProfile", "ipv4-rss-hash", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorIpV6RssHashProfile": MoMeta("AdaptorIpV6RssHashProfile", "adaptorIpV6RssHashProfile", "ipv6-rss-hash", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorLinkTraining": MoMeta("AdaptorLinkTraining", "adaptorLinkTraining", "link-training", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin", "user"], [u'adaptorExtEthIf'], [], ["Get", "Set"]),
"AdaptorPortProfiles": MoMeta("AdaptorPortProfiles", "adaptorPortProfiles", "port-profiles", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorExtEthIf'], [], ["Get"]),
"AdaptorRssProfile": MoMeta("AdaptorRssProfile", "adaptorRssProfile", "rss", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorUnit": MoMeta("AdaptorUnit", "adaptorUnit", "adaptor-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'adaptorCfgBackup', u'adaptorCfgImporter', u'adaptorExtEthIf', u'adaptorHostEthIf', u'adaptorHostFcIf', u'faultInst', u'mgmtController'], ["Get", "Set"]),
"AdvancedPowerProfile": MoMeta("AdvancedPowerProfile", "advancedPowerProfile", "advpwrprof", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'powerBudget'], [], ["Get", "Set"]),
"BiosBOT": MoMeta("BiosBOT", "biosBOT", "bdgep", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosBootDevGrp', u'biosBootDevPrecision', u'biosBootMode'], ["Get"]),
"BiosBootDev": MoMeta("BiosBootDev", "biosBootDev", "bdv-[order]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBootDevGrp'], [], ["Get"]),
"BiosBootDevGrp": MoMeta("BiosBootDevGrp", "biosBootDevGrp", "bdg-[order]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBOT'], [u'biosBootDev'], ["Get"]),
"BiosBootDevPrecision": MoMeta("BiosBootDevPrecision", "biosBootDevPrecision", "bdvp-[order]", VersionMeta.Version201a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBOT'], [], ["Get"]),
"BiosBootMode": MoMeta("BiosBootMode", "biosBootMode", "boot-mode", VersionMeta.Version201a, "OutputOnly", 0x1, [], ["admin", "user"], [u'biosBOT'], [], ["Get"]),
"BiosPassword": MoMeta("BiosPassword", "biosPassword", "bios-pw", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [], [], [None]),
"BiosPlatformDefaults": MoMeta("BiosPlatformDefaults", "biosPlatformDefaults", "bios-defaults", VersionMeta.Version151x, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosVfASPMSupport', u'biosVfAdjacentCacheLinePrefetch', u'biosVfAltitude', u'biosVfAssertNMIOnPERR', u'biosVfAssertNMIOnSERR', u'biosVfAutonumousCstateEnable', u'biosVfBootOptionNumRetry', u'biosVfBootOptionReCoolDown', u'biosVfBootOptionRetry', u'biosVfBootPerformanceMode', u'biosVfCDNEnable', u'biosVfCDNSupport', u'biosVfCPUEnergyPerformance', u'biosVfCPUFrequencyFloor', u'biosVfCPUPerformance', u'biosVfCPUPowerManagement', u'biosVfCkeLowPolicy', u'biosVfClosedLoopThermThrotl', u'biosVfCmciEnable', u'biosVfConfigTDP', u'biosVfConsoleRedirection', u'biosVfCoreMultiProcessing', u'biosVfDCUPrefetch', u'biosVfDRAMClockThrottling', u'biosVfDemandScrub', u'biosVfDirectCacheAccess', u'biosVfDramRefreshRate', u'biosVfEngPerfTuning', u'biosVfEnhancedIntelSpeedStepTech', u'biosVfExecuteDisableBit', u'biosVfExtendedAPIC', u'biosVfFRB2Enable', u'biosVfHWPMEnable', u'biosVfHardwarePrefetch', u'biosVfIMCInterleave', u'biosVfIOHResource', u'biosVfIPV6PXE', u'biosVfIntelHyperThreadingTech', u'biosVfIntelTurboBoostTech', u'biosVfIntelVTForDirectedIO', u'biosVfIntelVirtualizationTechnology', u'biosVfIohErrorEn', u'biosVfKTIPrefetch', u'biosVfLLCPrefetch', u'biosVfLOMPortOptionROM', u'biosVfLegacyUSBSupport', u'biosVfLvDIMMSupport', u'biosVfMMCFGBase', u'biosVfMemoryInterleave', u'biosVfMemoryMappedIOAbove4GB', u'biosVfMirroringMode', u'biosVfNUMAOptimized', u'biosVfOSBootWatchdogTimer', u'biosVfOSBootWatchdogTimerPolicy', u'biosVfOSBootWatchdogTimerTimeout', u'biosVfOnboardNIC', u'biosVfOnboardStorage', u'biosVfOnboardStorageSWStack', u'biosVfOutOfBandMgmtPort', u'biosVfPCIOptionROMs', u'biosVfPCISlotOptionROMEnable', u'biosVfPCIeSSDHotPlugSupport', u'biosVfPOSTErrorPause', u'biosVfPSata', u'biosVfPStateCoordType', u'biosVfPackageCStateLimit', u'biosVfPatrolScrub', 
u'biosVfPatrolScrubDuration', u'biosVfPchUsb30Mode', u'biosVfPciRomClp', u'biosVfPowerOnPasswordSupport', u'biosVfProcessorC1E', u'biosVfProcessorC3Report', u'biosVfProcessorC6Report', u'biosVfProcessorCState', u'biosVfPwrPerfTuning', u'biosVfQPIConfig', u'biosVfQpiSnoopMode', u'biosVfSataModeSelect', u'biosVfSelectMemoryRASConfiguration', u'biosVfSerialPortAEnable', u'biosVfSinglePCTLEnable', u'biosVfSparingMode', u'biosVfSrIov', u'biosVfSubNumaClustering', u'biosVfTPMControl', u'biosVfTPMSupport', u'biosVfUCSMBootOrderRuleControl', u'biosVfUSBBootConfig', u'biosVfUSBEmulation', u'biosVfUSBPortsConfig', u'biosVfUsbXhciSupport', u'biosVfVgaPriority', u'biosVfWorkLoadConfig', u'biosVfXPTPrefetch'], ["Get"]),
"BiosProfile": MoMeta("BiosProfile", "biosProfile", "bios-profile-[name]", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosProfileManagement'], [u'biosProfileToken'], ["Get", "Set"]),
"BiosProfileManagement": MoMeta("BiosProfileManagement", "biosProfileManagement", "profile-mgmt", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosProfile', u'uploadBiosProfile'], ["Get", "Set"]),
"BiosProfileToken": MoMeta("BiosProfileToken", "biosProfileToken", "token-[name]", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosProfile'], [], ["Get"]),
"BiosSettings": MoMeta("BiosSettings", "biosSettings", "bios-settings", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosVfIMCInterleave'], ["Get"]),
"BiosUnit": MoMeta("BiosUnit", "biosUnit", "bios", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'biosBOT', u'biosPlatformDefaults', u'biosProfileManagement', u'biosSettings', u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable'], ["Get", "Set"]),
"BiosVfASPMSupport": MoMeta("BiosVfASPMSupport", "biosVfASPMSupport", "ASPM-Support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAdjacentCacheLinePrefetch": MoMeta("BiosVfAdjacentCacheLinePrefetch", "biosVfAdjacentCacheLinePrefetch", "Adjacent-Cache-Line-Prefetch", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAltitude": MoMeta("BiosVfAltitude", "biosVfAltitude", "Altitude-Param", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAssertNMIOnPERR": MoMeta("BiosVfAssertNMIOnPERR", "biosVfAssertNMIOnPERR", "Assert-NMI-on-PERR", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAssertNMIOnSERR": MoMeta("BiosVfAssertNMIOnSERR", "biosVfAssertNMIOnSERR", "Assert-NMI-on-SERR", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAutonumousCstateEnable": MoMeta("BiosVfAutonumousCstateEnable", "biosVfAutonumousCstateEnable", "Autonumous-Cstate-Enable", VersionMeta.Version2010b, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootOptionNumRetry": MoMeta("BiosVfBootOptionNumRetry", "biosVfBootOptionNumRetry", "Boot-option-num-retry", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootOptionReCoolDown": MoMeta("BiosVfBootOptionReCoolDown", "biosVfBootOptionReCoolDown", "Boot-option-cool-down-retry", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootOptionRetry": MoMeta("BiosVfBootOptionRetry", "biosVfBootOptionRetry", "Boot-option-retry", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootPerformanceMode": MoMeta("BiosVfBootPerformanceMode", "biosVfBootPerformanceMode", "Boot-Performance-Mode", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCDNEnable": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCDNSupport": MoMeta("BiosVfCDNSupport", "biosVfCDNSupport", "CDN-Support", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUEnergyPerformance": MoMeta("BiosVfCPUEnergyPerformance", "biosVfCPUEnergyPerformance", "CPU-EngPerfBias", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUFrequencyFloor": MoMeta("BiosVfCPUFrequencyFloor", "biosVfCPUFrequencyFloor", "CPU-FreqFloor", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUPerformance": MoMeta("BiosVfCPUPerformance", "biosVfCPUPerformance", "CPU-Performance", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUPowerManagement": MoMeta("BiosVfCPUPowerManagement", "biosVfCPUPowerManagement", "CPU-PowerManagement", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCkeLowPolicy": MoMeta("BiosVfCkeLowPolicy", "biosVfCkeLowPolicy", "Cke-Low-Policy", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfClosedLoopThermThrotl": MoMeta("BiosVfClosedLoopThermThrotl", "biosVfClosedLoopThermThrotl", "Closed-Loop-Therm-Throtl", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCmciEnable": MoMeta("BiosVfCmciEnable", "biosVfCmciEnable", "Cmci-Enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfConfigTDP": MoMeta("BiosVfConfigTDP", "biosVfConfigTDP", "Config-TDP", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfConsoleRedirection": MoMeta("BiosVfConsoleRedirection", "biosVfConsoleRedirection", "Console-redirection", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCoreMultiProcessing": MoMeta("BiosVfCoreMultiProcessing", "biosVfCoreMultiProcessing", "Core-MultiProcessing", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDCUPrefetch": MoMeta("BiosVfDCUPrefetch", "biosVfDCUPrefetch", "DCU-Prefetch", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDRAMClockThrottling": MoMeta("BiosVfDRAMClockThrottling", "biosVfDRAMClockThrottling", "DRAM-Clock-Throttling", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDemandScrub": MoMeta("BiosVfDemandScrub", "biosVfDemandScrub", "Demand-Scrub-Param", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDirectCacheAccess": MoMeta("BiosVfDirectCacheAccess", "biosVfDirectCacheAccess", "Direct-Cache-Access", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDramRefreshRate": MoMeta("BiosVfDramRefreshRate", "biosVfDramRefreshRate", "dram-refresh-rate", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfEngPerfTuning": MoMeta("BiosVfEngPerfTuning", "biosVfEngPerfTuning", "Eng-Perf-Tuning", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfEnhancedIntelSpeedStepTech": MoMeta("BiosVfEnhancedIntelSpeedStepTech", "biosVfEnhancedIntelSpeedStepTech", "Enhanced-Intel-SpeedStep-Tech", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfExecuteDisableBit": MoMeta("BiosVfExecuteDisableBit", "biosVfExecuteDisableBit", "Execute-Disable-Bit", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfExtendedAPIC": MoMeta("BiosVfExtendedAPIC", "biosVfExtendedAPIC", "Extended-APIC", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfFRB2Enable": MoMeta("BiosVfFRB2Enable", "biosVfFRB2Enable", "FRB2-Enable", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfHWPMEnable": MoMeta("BiosVfHWPMEnable", "biosVfHWPMEnable", "HWPM-Enable", VersionMeta.Version2010b, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfHardwarePrefetch": MoMeta("BiosVfHardwarePrefetch", "biosVfHardwarePrefetch", "Hardware-Prefetch", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIMCInterleave": MoMeta("BiosVfIMCInterleave", "biosVfIMCInterleave", "imc-interleave", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIOHResource": MoMeta("BiosVfIOHResource", "biosVfIOHResource", "ioh-resource", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIPV6PXE": MoMeta("BiosVfIPV6PXE", "biosVfIPV6PXE", "IPv6-Pxe", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelHyperThreadingTech": MoMeta("BiosVfIntelHyperThreadingTech", "biosVfIntelHyperThreadingTech", "Intel-HyperThreading-Tech", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelTurboBoostTech": MoMeta("BiosVfIntelTurboBoostTech", "biosVfIntelTurboBoostTech", "Intel-Turbo-Boost-Tech", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelVTForDirectedIO": MoMeta("BiosVfIntelVTForDirectedIO", "biosVfIntelVTForDirectedIO", "Intel-VT-for-directed-IO", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelVirtualizationTechnology": MoMeta("BiosVfIntelVirtualizationTechnology", "biosVfIntelVirtualizationTechnology", "Intel-Virtualization-Technology", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIohErrorEn": MoMeta("BiosVfIohErrorEn", "biosVfIohErrorEn", "Ioh-Error-En", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfKTIPrefetch": MoMeta("BiosVfKTIPrefetch", "biosVfKTIPrefetch", "kti-prefetch", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLLCPrefetch": MoMeta("BiosVfLLCPrefetch", "biosVfLLCPrefetch", "LLC-Prefetch", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLOMPortOptionROM": MoMeta("BiosVfLOMPortOptionROM", "biosVfLOMPortOptionROM", "LOMPort-OptionROM", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLegacyUSBSupport": MoMeta("BiosVfLegacyUSBSupport", "biosVfLegacyUSBSupport", "LegacyUSB-Support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLvDIMMSupport": MoMeta("BiosVfLvDIMMSupport", "biosVfLvDIMMSupport", "LvDIMM-Support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMMCFGBase": MoMeta("BiosVfMMCFGBase", "biosVfMMCFGBase", "MMCFG-Base", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMemoryInterleave": MoMeta("BiosVfMemoryInterleave", "biosVfMemoryInterleave", "Memory-Interleave", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMemoryMappedIOAbove4GB": MoMeta("BiosVfMemoryMappedIOAbove4GB", "biosVfMemoryMappedIOAbove4GB", "Memory-mapped-IO-above-4GB", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMirroringMode": MoMeta("BiosVfMirroringMode", "biosVfMirroringMode", "Mirroring-Mode", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfNUMAOptimized": MoMeta("BiosVfNUMAOptimized", "biosVfNUMAOptimized", "NUMA-optimized", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimer": MoMeta("BiosVfOSBootWatchdogTimer", "biosVfOSBootWatchdogTimer", "OS-Boot-Watchdog-Timer-Param", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimerPolicy": MoMeta("BiosVfOSBootWatchdogTimerPolicy", "biosVfOSBootWatchdogTimerPolicy", "OS-Boot-Watchdog-Timer-Policy", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimerTimeout": MoMeta("BiosVfOSBootWatchdogTimerTimeout", "biosVfOSBootWatchdogTimerTimeout", "OS-Boot-Watchdog-Timer-Time-Out", VersionMeta.Version151x, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardNIC": MoMeta("BiosVfOnboardNIC", "biosVfOnboardNIC", "Onboard-NIC", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardStorage": MoMeta("BiosVfOnboardStorage", "biosVfOnboardStorage", "Onboard-Storage", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardStorageSWStack": MoMeta("BiosVfOnboardStorageSWStack", "biosVfOnboardStorageSWStack", "Onboard-SCU-Storage-SWStack", VersionMeta.Version151x, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOutOfBandMgmtPort": MoMeta("BiosVfOutOfBandMgmtPort", "biosVfOutOfBandMgmtPort", "OoB-MgmtPort", VersionMeta.Version154, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPCIOptionROMs": MoMeta("BiosVfPCIOptionROMs", "biosVfPCIOptionROMs", "PCI-OptionROMs", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPCISlotOptionROMEnable": MoMeta("BiosVfPCISlotOptionROMEnable", "biosVfPCISlotOptionROMEnable", "PCI-Slot-OptionROM-Enable", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPCIeSSDHotPlugSupport": MoMeta("BiosVfPCIeSSDHotPlugSupport", "biosVfPCIeSSDHotPlugSupport", "PCIeSSDHotPlugSupport", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPOSTErrorPause": MoMeta("BiosVfPOSTErrorPause", "biosVfPOSTErrorPause", "POST-error-pause", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPSata": MoMeta("BiosVfPSata", "biosVfPSata", "PSata", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPStateCoordType": MoMeta("BiosVfPStateCoordType", "biosVfPStateCoordType", "p-state-coord", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPackageCStateLimit": MoMeta("BiosVfPackageCStateLimit", "biosVfPackageCStateLimit", "Package-CState-Limit", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPatrolScrub": MoMeta("BiosVfPatrolScrub", "biosVfPatrolScrub", "Patrol-Scrub-Param", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPatrolScrubDuration": MoMeta("BiosVfPatrolScrubDuration", "biosVfPatrolScrubDuration", "Patrol-Scrub-Duration", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPchUsb30Mode": MoMeta("BiosVfPchUsb30Mode", "biosVfPchUsb30Mode", "PchUsb30-Mode", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPciRomClp": MoMeta("BiosVfPciRomClp", "biosVfPciRomClp", "pci-rom-clp", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPowerOnPasswordSupport": MoMeta("BiosVfPowerOnPasswordSupport", "biosVfPowerOnPasswordSupport", "POP-Support", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC1E": MoMeta("BiosVfProcessorC1E", "biosVfProcessorC1E", "Processor-C1E", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC3Report": MoMeta("BiosVfProcessorC3Report", "biosVfProcessorC3Report", "Processor-C3-Report", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC6Report": MoMeta("BiosVfProcessorC6Report", "biosVfProcessorC6Report", "Processor-C6-Report", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorCState": MoMeta("BiosVfProcessorCState", "biosVfProcessorCState", "Processor-C-State", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPwrPerfTuning": MoMeta("BiosVfPwrPerfTuning", "biosVfPwrPerfTuning", "Pwr-Perf-Tuning", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfQPIConfig": MoMeta("BiosVfQPIConfig", "biosVfQPIConfig", "QPI-Config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfQpiSnoopMode": MoMeta("BiosVfQpiSnoopMode", "biosVfQpiSnoopMode", "QPI-Snoop-Mode", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfResumeOnACPowerLoss": MoMeta("BiosVfResumeOnACPowerLoss", "biosVfResumeOnACPowerLoss", "Resume-on-AC-power-loss", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get", "Set"]),
"BiosVfSataModeSelect": MoMeta("BiosVfSataModeSelect", "biosVfSataModeSelect", "SataModeSelect", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSelectMemoryRASConfiguration": MoMeta("BiosVfSelectMemoryRASConfiguration", "biosVfSelectMemoryRASConfiguration", "SelectMemory-RAS-configuration", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSerialPortAEnable": MoMeta("BiosVfSerialPortAEnable", "biosVfSerialPortAEnable", "Serial-port-A-enable", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSinglePCTLEnable": MoMeta("BiosVfSinglePCTLEnable", "biosVfSinglePCTLEnable", "Single-PCTL-Enable", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSparingMode": MoMeta("BiosVfSparingMode", "biosVfSparingMode", "Sparing-Mode", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSrIov": MoMeta("BiosVfSrIov", "biosVfSrIov", "sr-iov", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSubNumaClustering": MoMeta("BiosVfSubNumaClustering", "biosVfSubNumaClustering", "sub-numa-cluster", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfTPMControl": MoMeta("BiosVfTPMControl", "biosVfTPMControl", "TPM-Control", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfTPMSupport": MoMeta("BiosVfTPMSupport", "biosVfTPMSupport", "TPM-Support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUCSMBootOrderRuleControl": MoMeta("BiosVfUCSMBootOrderRuleControl", "biosVfUCSMBootOrderRuleControl", "Boot-Order-Rules", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBBootConfig": MoMeta("BiosVfUSBBootConfig", "biosVfUSBBootConfig", "USB-Boot-Config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBEmulation": MoMeta("BiosVfUSBEmulation", "biosVfUSBEmulation", "USBEmulation-Support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBPortsConfig": MoMeta("BiosVfUSBPortsConfig", "biosVfUSBPortsConfig", "USB-Ports-Config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUsbXhciSupport": MoMeta("BiosVfUsbXhciSupport", "biosVfUsbXhciSupport", "UsbXhci-Support", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfVgaPriority": MoMeta("BiosVfVgaPriority", "biosVfVgaPriority", "VgaPriority", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfWorkLoadConfig": MoMeta("BiosVfWorkLoadConfig", "biosVfWorkLoadConfig", "work-load-config", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfXPTPrefetch": MoMeta("BiosVfXPTPrefetch", "biosVfXPTPrefetch", "xpt-prefetch", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"CertificateManagement": MoMeta("CertificateManagement", "certificateManagement", "cert-mgmt", VersionMeta.Version209c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'currentCertificate', u'generateCertificateSigningRequest', u'uploadCertificate'], ["Get"]),
"CommHttp": MoMeta("CommHttp", "commHttp", "http-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommHttps": MoMeta("CommHttps", "commHttps", "https-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommIpmiLan": MoMeta("CommIpmiLan", "commIpmiLan", "ipmi-lan-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommKvm": MoMeta("CommKvm", "commKvm", "kvm-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommMailAlert": MoMeta("CommMailAlert", "commMailAlert", "mail-alert-svc", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'mailRecipient'], ["Get", "Set"]),
"CommNtpProvider": MoMeta("CommNtpProvider", "commNtpProvider", "ntp-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommRedfish": MoMeta("CommRedfish", "commRedfish", "redfish-svc", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommSavedVMediaMap": MoMeta("CommSavedVMediaMap", "commSavedVMediaMap", "saved-vmmap-[volume_name]", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commVMedia'], [], ["Get", "Remove", "Set"]),
"CommSnmp": MoMeta("CommSnmp", "commSnmp", "snmp-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'commSnmpTrap', u'commSnmpUser'], ["Get", "Set"]),
"CommSnmpTrap": MoMeta("CommSnmpTrap", "commSnmpTrap", "snmp-trap-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSnmp'], [], ["Get", "Set"]),
"CommSnmpUser": MoMeta("CommSnmpUser", "commSnmpUser", "snmpv3-user-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSnmp'], [], ["Get", "Set"]),
"CommSsh": MoMeta("CommSsh", "commSsh", "ssh-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommSvcEp": MoMeta("CommSvcEp", "commSvcEp", "svc-ext", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'commHttp', u'commHttps', u'commIpmiLan', u'commKvm', u'commMailAlert', u'commNtpProvider', u'commRedfish', u'commSnmp', u'commSsh', u'commSyslog', u'commVMedia'], ["Get"]),
"CommSyslog": MoMeta("CommSyslog", "commSyslog", "syslog", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'commSyslogClient'], ["Get", "Set"]),
"CommSyslogClient": MoMeta("CommSyslogClient", "commSyslogClient", "client-[name]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSyslog'], [], ["Get"]),
"CommVMedia": MoMeta("CommVMedia", "commVMedia", "vmedia-svc", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'commSavedVMediaMap', u'commVMediaMap'], ["Get", "Set"]),
"CommVMediaMap": MoMeta("CommVMediaMap", "commVMediaMap", "vmmap-[volume_name]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commVMedia'], [], ["Add", "Get"]),
"ComputeBoard": MoMeta("ComputeBoard", "computeBoard", "board", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'biosVfResumeOnACPowerLoss', u'computeMbPowerStats', u'computeRackUnitMbTempStats', u'equipmentTpm', u'fanPolicy', u'faultInst', u'memoryArray', u'pidCatalog', u'processorUnit', u'storageController', u'storageControllerNVMe', u'storageFlexFlashController', u'storageFlexUtilController', u'storageLocalDiskSlotEp'], ["Get"]),
"ComputeMbPowerStats": MoMeta("ComputeMbPowerStats", "computeMbPowerStats", "power-stats", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"ComputeRackUnit": MoMeta("ComputeRackUnit", "computeRackUnit", "rack-unit-[server_id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "user"], [u'topSystem'], [u'adaptorUnit', u'biosUnit', u'computeBoard', u'equipmentFanModule', u'equipmentIndicatorLed', u'equipmentLocatorLed', u'equipmentPsu', u'equipmentPsuColdRedundancy', u'eventManagement', u'faultInst', u'lsbootDef', u'lsbootDevPrecision', u'mgmtController', u'networkAdapterUnit', u'oneTimeBootDevice', u'oneTimePrecisionBootDevice', u'pciEquipSlot', u'powerBudget', u'powerMonitor', u'serverUtilization', u'solIf', u'sysdebugTechSupportExport', u'systemIOController', u'x86LiveDebug'], ["Get", "Set"]),
"ComputeRackUnitMbTempStats": MoMeta("ComputeRackUnitMbTempStats", "computeRackUnitMbTempStats", "temp-stats", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"CurrentCertificate": MoMeta("CurrentCertificate", "currentCertificate", "curr-cert", VersionMeta.Version209c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"DownloadClientCertificate": MoMeta("DownloadClientCertificate", "downloadClientCertificate", "kmip-client-cert-download", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"DownloadClientPrivateKey": MoMeta("DownloadClientPrivateKey", "downloadClientPrivateKey", "kmip-private-key-download", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"DownloadLdapCACertificate": MoMeta("DownloadLdapCACertificate", "downloadLdapCACertificate", "ldap-ca-cert-download", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'ldapCACertificateManagement'], [], ["Get"]),
"DownloadRootCACertificate": MoMeta("DownloadRootCACertificate", "downloadRootCACertificate", "kmip-ca-cert-download", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"EquipmentFan": MoMeta("EquipmentFan", "equipmentFan", "fan-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentFanModule'], [u'faultInst'], ["Get"]),
"EquipmentFanModule": MoMeta("EquipmentFanModule", "equipmentFanModule", "fan-module-[tray]-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'equipmentFan'], ["Get"]),
"EquipmentIndicatorLed": MoMeta("EquipmentIndicatorLed", "equipmentIndicatorLed", "indicator-led-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get"]),
"EquipmentLocatorLed": MoMeta("EquipmentLocatorLed", "equipmentLocatorLed", "locator-led", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get", "Set"]),
"EquipmentPsu": MoMeta("EquipmentPsu", "equipmentPsu", "psu-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'equipmentPsuFan', u'faultInst'], ["Get"]),
"EquipmentPsuColdRedundancy": MoMeta("EquipmentPsuColdRedundancy", "equipmentPsuColdRedundancy", "psu-cold-redundancy", VersionMeta.Version204c, "InputOutput", 0x1, [], ["admin"], [u'computeRackUnit'], [], ["Get", "Set"]),
"EquipmentPsuFan": MoMeta("EquipmentPsuFan", "equipmentPsuFan", "fan-[id]", VersionMeta.Version202c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentPsu'], [], ["Get"]),
"EquipmentTpm": MoMeta("EquipmentTpm", "equipmentTpm", "tpm", VersionMeta.Version201a, "OutputOnly", 0x1, [], ["read-only"], [u'computeBoard'], [], ["Get"]),
"Error": MoMeta("Error", "error", "", VersionMeta.Version151f, "OutputOnly", 0x1, [], [""], [], [], [None]),
"EventManagement": MoMeta("EventManagement", "eventManagement", "event-management", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'platformEventFilters'], ["Get", "Set"]),
"ExportClientCertificate": MoMeta("ExportClientCertificate", "exportClientCertificate", "kmip-client-cert-export", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"ExportClientPrivateKey": MoMeta("ExportClientPrivateKey", "exportClientPrivateKey", "kmip-private-key-export", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"ExportLdapCACertificate": MoMeta("ExportLdapCACertificate", "exportLdapCACertificate", "ldap-ca-cert-export", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'ldapCACertificateManagement'], [], ["Get"]),
"ExportRootCACertificate": MoMeta("ExportRootCACertificate", "exportRootCACertificate", "kmip-ca-cert-export", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"FanPolicy": MoMeta("FanPolicy", "fanPolicy", "fan-policy", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get", "Set"]),
"FaultInst": MoMeta("FaultInst", "faultInst", "fault-[code]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeBoard', u'computeRackUnit', u'equipmentFan', u'equipmentPsu', u'memoryArray', u'memoryUnit', u'mgmtIf', u'pciEquipSlot', u'powerBudget', u'processorUnit', u'storageController', u'storageFlexFlashController', u'storageFlexFlashPhysicalDrive', u'storageFlexFlashVirtualDrive', u'storageLocalDisk', u'storageRaidBattery', u'storageVirtualDrive', u'sysdebugMEpLog'], [], ["Get"]),
"FirmwareBootDefinition": MoMeta("FirmwareBootDefinition", "firmwareBootDefinition", "fw-boot-def", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController', u'storageController', u'systemIOController'], [u'firmwareBootUnit'], ["Get"]),
"FirmwareBootUnit": MoMeta("FirmwareBootUnit", "firmwareBootUnit", "bootunit-[type]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'firmwareBootDefinition'], [], ["Get", "Set"]),
"FirmwareRunning": MoMeta("FirmwareRunning", "firmwareRunning", "fw-[deployment]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController', u'storageController', u'systemIOController'], [], ["Get"]),
"FirmwareUpdatable": MoMeta("FirmwareUpdatable", "firmwareUpdatable", "fw-updatable", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController', u'systemIOController'], [], ["Get"]),
"GenerateCertificateSigningRequest": MoMeta("GenerateCertificateSigningRequest", "generateCertificateSigningRequest", "gen-csr-req", VersionMeta.Version209c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"GenerateRandomPassword": MoMeta("GenerateRandomPassword", "generateRandomPassword", "policy", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get"]),
"GeneratedStorageControllerKeyId": MoMeta("GeneratedStorageControllerKeyId", "generatedStorageControllerKeyId", "gen-key-id", VersionMeta.Version209c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"GpuInventory": MoMeta("GpuInventory", "gpuInventory", "gpu-inv-[id]", VersionMeta.Version303a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pciEquipSlot'], [], ["Get"]),
"HuuController": MoMeta("HuuController", "huuController", "huu", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'huuFirmwareCatalog', u'huuFirmwareRunning', u'huuFirmwareUpdateCancel', u'huuFirmwareUpdater'], ["Get"]),
"HuuFirmwareCatalog": MoMeta("HuuFirmwareCatalog", "huuFirmwareCatalog", "firmwareCatalog", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [u'huuFirmwareCatalogComponent'], ["Get"]),
"HuuFirmwareCatalogComponent": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"]),
"HuuFirmwareComponent": MoMeta("HuuFirmwareComponent", "huuFirmwareComponent", "component-[component]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareRunning'], [], ["Get"]),
"HuuFirmwareRunning": MoMeta("HuuFirmwareRunning", "huuFirmwareRunning", "currentFirmware", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [u'huuFirmwareComponent'], ["Get"]),
"HuuFirmwareUpdateCancel": MoMeta("HuuFirmwareUpdateCancel", "huuFirmwareUpdateCancel", "firmwareUpdateCancel", VersionMeta.Version152, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [], ["Get", "Set"]),
"HuuFirmwareUpdateStatus": MoMeta("HuuFirmwareUpdateStatus", "huuFirmwareUpdateStatus", "updateStatus", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareUpdater'], [u'huuUpdateComponentStatus'], ["Get"]),
"HuuFirmwareUpdater": MoMeta("HuuFirmwareUpdater", "huuFirmwareUpdater", "firmwareUpdater", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [], ["Get"]),
"HuuUpdateComponentStatus": MoMeta("HuuUpdateComponentStatus", "huuUpdateComponentStatus", "component-[component]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareUpdateStatus'], [], ["Get"]),
"IodController": MoMeta("IodController", "iodController", "iod", VersionMeta.Version151x, "OutputOnly", 0x1, [], ["read-only"], [u'topSystem'], [u'iodSnapshotCancel', u'iodSnapshotStart', u'iodSnapshotStatus'], ["Get"]),
"IodSnapshotCancel": MoMeta("IodSnapshotCancel", "iodSnapshotCancel", "snapshotCancel", VersionMeta.Version151x, "InputOutput", 0x1, [], ["admin"], [u'iodController'], [], [None]),
"IodSnapshotStart": MoMeta("IodSnapshotStart", "iodSnapshotStart", "snapshotStart", VersionMeta.Version151x, "InputOutput", 0x1, [], ["admin"], [u'iodController'], [], [None]),
"IodSnapshotStatus": MoMeta("IodSnapshotStatus", "iodSnapshotStatus", "snapshotStatus", VersionMeta.Version151x, "OutputOnly", 0x1, [], ["read-only"], [u'iodController'], [], ["Get"]),
"IpBlocking": MoMeta("IpBlocking", "ipBlocking", "ip-block", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtIf'], [], ["Get", "Set"]),
"IpFiltering": MoMeta("IpFiltering", "ipFiltering", "ip-filter", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtIf'], [], ["Get", "Set"]),
"KmipManagement": MoMeta("KmipManagement", "kmipManagement", "kmip-mgmt", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'downloadClientCertificate', u'downloadClientPrivateKey', u'downloadRootCACertificate', u'exportClientCertificate', u'exportClientPrivateKey', u'exportRootCACertificate', u'kmipServer', u'kmipServerLogin'], ["Get", "Set"]),
"KmipServer": MoMeta("KmipServer", "kmipServer", "kmip-server-[id]", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], ["Get", "Set"]),
"KmipServerLogin": MoMeta("KmipServerLogin", "kmipServerLogin", "kmip-login", VersionMeta.Version302b, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], ["Get", "Set"]),
"LdapCACertificate": MoMeta("LdapCACertificate", "ldapCACertificate", "ldap-ca-cert", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "user"], [u'ldapCACertificateManagement'], [], ["Get", "Set"]),
"LdapCACertificateManagement": MoMeta("LdapCACertificateManagement", "ldapCACertificateManagement", "ldap-ca-cert-mgmt", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "user"], [u'aaaLdap'], [u'downloadLdapCACertificate', u'exportLdapCACertificate', u'ldapCACertificate'], ["Get", "Set"]),
"LsbootBootSecurity": MoMeta("LsbootBootSecurity", "lsbootBootSecurity", "boot-security", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "user"], [u'lsbootDef'], [], ["Get", "Set"]),
"LsbootCdd": MoMeta("LsbootCdd", "lsbootCdd", "cdd-[name]", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Get", "Set"]),
"LsbootDef": MoMeta("LsbootDef", "lsbootDef", "boot-policy", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'lsbootBootSecurity', u'lsbootEfi', u'lsbootLan', u'lsbootStorage', u'lsbootVirtualMedia'], ["Get", "Set"]),
"LsbootDevPrecision": MoMeta("LsbootDevPrecision", "lsbootDevPrecision", "boot-precision", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'lsbootCdd', u'lsbootHdd', u'lsbootIscsi', u'lsbootNVMe', u'lsbootPchStorage', u'lsbootPxe', u'lsbootSan', u'lsbootSd', u'lsbootUefiShell', u'lsbootUsb', u'lsbootVMedia'], ["Get", "Set"]),
"LsbootEfi": MoMeta("LsbootEfi", "lsbootEfi", "efi-read-only", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootHdd": MoMeta("LsbootHdd", "lsbootHdd", "hdd-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootIscsi": MoMeta("LsbootIscsi", "lsbootIscsi", "iscsi-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootLan": MoMeta("LsbootLan", "lsbootLan", "lan-read-only", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootLocalStorage": MoMeta("LsbootLocalStorage", "lsbootLocalStorage", "local-storage", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootStorage'], [], ["Get"]),
"LsbootNVMe": MoMeta("LsbootNVMe", "lsbootNVMe", "nvme-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Get", "Set"]),
"LsbootPchStorage": MoMeta("LsbootPchStorage", "lsbootPchStorage", "pchstorage-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootPxe": MoMeta("LsbootPxe", "lsbootPxe", "pxe-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootSan": MoMeta("LsbootSan", "lsbootSan", "san-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootSd": MoMeta("LsbootSd", "lsbootSd", "sd-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootStorage": MoMeta("LsbootStorage", "lsbootStorage", "storage-read-write", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootUefiShell": MoMeta("LsbootUefiShell", "lsbootUefiShell", "uefishell-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootUsb": MoMeta("LsbootUsb", "lsbootUsb", "usb-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootVMedia": MoMeta("LsbootVMedia", "lsbootVMedia", "vm-[name]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootVirtualMedia": MoMeta("LsbootVirtualMedia", "lsbootVirtualMedia", "vm-[access]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get"]),
"MailRecipient": MoMeta("MailRecipient", "mailRecipient", "mail-recipient-[id]", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commMailAlert'], [], ["Get", "Remove", "Set"]),
"MemoryArray": MoMeta("MemoryArray", "memoryArray", "memarray-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'memoryUnit'], ["Get", "Set"]),
"MemoryUnit": MoMeta("MemoryUnit", "memoryUnit", "mem-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'memoryArray'], [u'faultInst', u'memoryUnitEnvStats'], ["Get"]),
"MemoryUnitEnvStats": MoMeta("MemoryUnitEnvStats", "memoryUnitEnvStats", "dimm-env-stats", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'memoryUnit'], [], ["Get"]),
"MgmtBackup": MoMeta("MgmtBackup", "mgmtBackup", "export-config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], [None]),
"MgmtController": MoMeta("MgmtController", "mgmtController", "mgmt", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeRackUnit', u'storageSasExpander'], [u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable', u'mgmtIf', u'sysdebugMEpLog'], ["Get"]),
"MgmtIf": MoMeta("MgmtIf", "mgmtIf", "if-1", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtController'], [u'faultInst', u'ipBlocking', u'ipFiltering'], ["Get", "Set"]),
"MgmtImporter": MoMeta("MgmtImporter", "mgmtImporter", "import-config", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], [None]),
"MgmtInventory": MoMeta("MgmtInventory", "mgmtInventory", "inventory", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], [None]),
"NetworkAdapterEthIf": MoMeta("NetworkAdapterEthIf", "networkAdapterEthIf", "eth-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'networkAdapterUnit'], [], ["Get"]),
"NetworkAdapterUnit": MoMeta("NetworkAdapterUnit", "networkAdapterUnit", "network-adapter-[slot]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'networkAdapterEthIf'], ["Get"]),
"OneTimeBootDevice": MoMeta("OneTimeBootDevice", "oneTimeBootDevice", "boot-one-time", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get", "Set"]),
"OneTimePrecisionBootDevice": MoMeta("OneTimePrecisionBootDevice", "oneTimePrecisionBootDevice", "one-time-precision-boot", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get", "Set"]),
"OsiCancel": MoMeta("OsiCancel", "osiCancel", "osiCancel", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'osiController'], [], [None]),
"OsiController": MoMeta("OsiController", "osiController", "osi", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["read-only"], [u'topSystem'], [u'osiCancel', u'osiStart', u'osiStatus'], ["Get"]),
"OsiStart": MoMeta("OsiStart", "osiStart", "osiStart", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'osiController'], [], ["Get"]),
"OsiStatus": MoMeta("OsiStatus", "osiStatus", "osiStatus", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["read-only"], [u'osiController'], [], ["Get"]),
"PciEquipSlot": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'faultInst', u'gpuInventory'], ["Get"]),
"PidCatalog": MoMeta("PidCatalog", "pidCatalog", "pid", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'activatePIDCatalog', u'pidCatalogCpu', u'pidCatalogDimm', u'pidCatalogHdd', u'pidCatalogPCIAdapter', u'uploadPIDCatalog'], ["Get"]),
"PidCatalogCpu": MoMeta("PidCatalogCpu", "pidCatalogCpu", "pid-cpu-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogDimm": MoMeta("PidCatalogDimm", "pidCatalogDimm", "pid-dimm-[name]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogHdd": MoMeta("PidCatalogHdd", "pidCatalogHdd", "pid-hdd-[disk]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogPCIAdapter": MoMeta("PidCatalogPCIAdapter", "pidCatalogPCIAdapter", "pid-pciadapter-[slot]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PlatformEventFilters": MoMeta("PlatformEventFilters", "platformEventFilters", "pef-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'eventManagement'], [], ["Get", "Set"]),
"PowerBudget": MoMeta("PowerBudget", "powerBudget", "budget", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'advancedPowerProfile', u'faultInst', u'standardPowerProfile'], ["Get", "Set"]),
"PowerMonitor": MoMeta("PowerMonitor", "powerMonitor", "pwrmonitor-[domain]", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get"]),
"ProcessorEnvStats": MoMeta("ProcessorEnvStats", "processorEnvStats", "env-stats", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'processorUnit'], [], ["Get"]),
"ProcessorUnit": MoMeta("ProcessorUnit", "processorUnit", "cpu-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'processorEnvStats'], ["Get"]),
"SelfEncryptStorageController": MoMeta("SelfEncryptStorageController", "selfEncryptStorageController", "ctr-self-encrypt", VersionMeta.Version209c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get", "Set"]),
"ServerUtilization": MoMeta("ServerUtilization", "serverUtilization", "utilization", VersionMeta.Version202c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get"]),
"SolIf": MoMeta("SolIf", "solIf", "sol-if", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get", "Set"]),
"StandardPowerProfile": MoMeta("StandardPowerProfile", "standardPowerProfile", "stdpwrprof", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'powerBudget'], [], ["Get", "Set"]),
"StorageController": MoMeta("StorageController", "storageController", "storage-[type]-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'firmwareBootDefinition', u'firmwareRunning', u'generatedStorageControllerKeyId', u'selfEncryptStorageController', u'storageControllerHealth', u'storageControllerProps', u'storageControllerSettings', u'storageLocalDisk', u'storageLocalDiskProps', u'storageRaidBattery', u'storageVirtualDrive', u'storageVirtualDriveCreatorUsingUnusedPhysicalDrive', u'storageVirtualDriveCreatorUsingVirtualDriveGroup', u'suggestedStorageControllerSecurityKey'], ["Get", "Set"]),
"StorageControllerHealth": MoMeta("StorageControllerHealth", "storageControllerHealth", "controller-health", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageControllerNVMe": MoMeta("StorageControllerNVMe", "storageControllerNVMe", "storage-NVMe-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'storageNVMePhysicalDrive'], ["Get"]),
"StorageControllerProps": MoMeta("StorageControllerProps", "storageControllerProps", "controller-props", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageControllerSettings": MoMeta("StorageControllerSettings", "storageControllerSettings", "controller-settings", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageFlexFlashController": MoMeta("StorageFlexFlashController", "storageFlexFlashController", "storage-flexflash-[id]", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'storageFlexFlashControllerProps', u'storageFlexFlashOperationalProfile', u'storageFlexFlashPhysicalDrive', u'storageFlexFlashVirtualDrive', u'storageFlexFlashVirtualDriveImageMap'], ["Get", "Set"]),
"StorageFlexFlashControllerProps": MoMeta("StorageFlexFlashControllerProps", "storageFlexFlashControllerProps", "flexflashcontroller-props", VersionMeta.Version202c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get"]),
"StorageFlexFlashOperationalProfile": MoMeta("StorageFlexFlashOperationalProfile", "storageFlexFlashOperationalProfile", "oper-profile", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get", "Set"]),
"StorageFlexFlashPhysicalDrive": MoMeta("StorageFlexFlashPhysicalDrive", "storageFlexFlashPhysicalDrive", "card-[physical_drive_id]", VersionMeta.Version202c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get"]),
"StorageFlexFlashVirtualDrive": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"]),
"StorageFlexFlashVirtualDriveImageMap": MoMeta("StorageFlexFlashVirtualDriveImageMap", "storageFlexFlashVirtualDriveImageMap", "vdrive-map-[virtual_drive]", VersionMeta.Version202c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get", "Set"]),
"StorageFlexUtilController": MoMeta("StorageFlexUtilController", "storageFlexUtilController", "storage-flexutil-[id]", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'storageFlexUtilHealth', u'storageFlexUtilOperationalProfile', u'storageFlexUtilPhysicalDrive', u'storageFlexUtilVirtualDrive', u'storageFlexUtilVirtualDriveImageMap'], ["Get", "Set"]),
"StorageFlexUtilHealth": MoMeta("StorageFlexUtilHealth", "storageFlexUtilHealth", "health", VersionMeta.Version311d, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexUtilController'], [], ["Get"]),
"StorageFlexUtilOperationalProfile": MoMeta("StorageFlexUtilOperationalProfile", "storageFlexUtilOperationalProfile", "oper-profile", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexUtilController'], [], ["Get", "Set"]),
"StorageFlexUtilPhysicalDrive": MoMeta("StorageFlexUtilPhysicalDrive", "storageFlexUtilPhysicalDrive", "card-[physical_drive_id]", VersionMeta.Version311d, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexUtilController'], [], ["Get"]),
"StorageFlexUtilVirtualDrive": MoMeta("StorageFlexUtilVirtualDrive", "storageFlexUtilVirtualDrive", "vd-[partition_name]", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexUtilController'], [], ["Get", "Set"]),
"StorageFlexUtilVirtualDriveImageMap": MoMeta("StorageFlexUtilVirtualDriveImageMap", "storageFlexUtilVirtualDriveImageMap", "vdrive-map-[virtual_drive]", VersionMeta.Version311d, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexUtilController'], [], ["Get", "Set"]),
"StorageLocalDisk": MoMeta("StorageLocalDisk", "storageLocalDisk", "pd-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageLocalDiskProps', u'storageOperation'], ["Get", "Set"]),
"StorageLocalDiskProps": MoMeta("StorageLocalDiskProps", "storageLocalDiskProps", "general-props", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController', u'storageLocalDisk'], [], ["Get"]),
"StorageLocalDiskSlotEp": MoMeta("StorageLocalDiskSlotEp", "storageLocalDiskSlotEp", "disk-[id]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"StorageLocalDiskUsage": MoMeta("StorageLocalDiskUsage", "storageLocalDiskUsage", "pd-[physical_drive]", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDrive'], [], ["Get"]),
"StorageNVMePhysicalDrive": MoMeta("StorageNVMePhysicalDrive", "storageNVMePhysicalDrive", "pd-[id]", VersionMeta.Version311d, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageControllerNVMe'], [], ["Get"]),
"StorageOperation": MoMeta("StorageOperation", "storageOperation", "storage-operation", VersionMeta.Version201a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageLocalDisk', u'storageRaidBattery', u'storageVirtualDrive'], [], ["Get"]),
"StorageRaidBattery": MoMeta("StorageRaidBattery", "storageRaidBattery", "raid-battery", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageOperation'], ["Get", "Set"]),
"StorageSasExpander": MoMeta("StorageSasExpander", "storageSasExpander", "sas-expander-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["read-only"], [u'topSystem'], [u'mgmtController'], ["Get"]),
"StorageUnusedLocalDisk": MoMeta("StorageUnusedLocalDisk", "storageUnusedLocalDisk", "pd-[id]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDriveCreatorUsingUnusedPhysicalDrive'], [], ["Get"]),
"StorageVirtualDrive": MoMeta("StorageVirtualDrive", "storageVirtualDrive", "vd-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageLocalDiskUsage', u'storageOperation'], ["Get", "Remove", "Set"]),
"StorageVirtualDriveCreatorUsingUnusedPhysicalDrive": MoMeta("StorageVirtualDriveCreatorUsingUnusedPhysicalDrive", "storageVirtualDriveCreatorUsingUnusedPhysicalDrive", "virtual-drive-create", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin"], [u'storageController'], [u'storageUnusedLocalDisk'], ["Get", "Set"]),
"StorageVirtualDriveCreatorUsingVirtualDriveGroup": MoMeta("StorageVirtualDriveCreatorUsingVirtualDriveGroup", "storageVirtualDriveCreatorUsingVirtualDriveGroup", "virtual-drive-carve", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin"], [u'storageController'], [u'storageVirtualDriveWithDriveGroupSpace'], ["Get", "Set"]),
"StorageVirtualDriveWithDriveGroupSpace": MoMeta("StorageVirtualDriveWithDriveGroupSpace", "storageVirtualDriveWithDriveGroupSpace", "vd-[id]", VersionMeta.Version201a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDriveCreatorUsingVirtualDriveGroup'], [], ["Get"]),
"SuggestedStorageControllerSecurityKey": MoMeta("SuggestedStorageControllerSecurityKey", "suggestedStorageControllerSecurityKey", "suggested-sec-key", VersionMeta.Version209c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"SysdebugMEpLog": MoMeta("SysdebugMEpLog", "sysdebugMEpLog", "log-[type]-[id]", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtController'], [u'faultInst'], ["Get", "Set"]),
"SysdebugTechSupportExport": MoMeta("SysdebugTechSupportExport", "sysdebugTechSupportExport", "tech-support", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], [None]),
"SystemBoardUnit": MoMeta("SystemBoardUnit", "systemBoardUnit", "sys-board-unit", VersionMeta.Version311d, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], ["Get"]),
"SystemIOController": MoMeta("SystemIOController", "systemIOController", "sioc-[id]", VersionMeta.Version202c, "OutputOnly", 0x1, [], ["read-only"], [u'computeRackUnit'], [u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable'], ["Get"]),
"TopRoot": MoMeta("TopRoot", "topRoot", "", VersionMeta.Version151f, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [], [u'topSystem'], ["Get"]),
"TopSystem": MoMeta("TopSystem", "topSystem", "sys", VersionMeta.Version151f, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topRoot'], [u'aaaLdap', u'aaaUserEp', u'certificateManagement', u'commSvcEp', u'computeRackUnit', u'huuController', u'iodController', u'kmipManagement', u'mgmtBackup', u'mgmtImporter', u'mgmtInventory', u'osiController', u'storageSasExpander', u'systemBoardUnit', u'vicBackupAll', u'vicImporterAll'], ["Get", "Set"]),
"UploadBiosProfile": MoMeta("UploadBiosProfile", "uploadBiosProfile", "upload-bios-profile", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosProfileManagement'], [], [None]),
"UploadCertificate": MoMeta("UploadCertificate", "uploadCertificate", "upload-cert", VersionMeta.Version209c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"UploadPIDCatalog": MoMeta("UploadPIDCatalog", "uploadPIDCatalog", "upload-catalog", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"VicBackupAll": MoMeta("VicBackupAll", "vicBackupAll", "vic-all-exportconfig", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], ["Get", "Set"]),
"VicImporterAll": MoMeta("VicImporterAll", "vicImporterAll", "vic-all-importconfig", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [], ["Get", "Set"]),
"X86LiveDebug": MoMeta("X86LiveDebug", "x86LiveDebug", "live-debug", VersionMeta.Version311d, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get"]),
},
"modular": {
"AaaLdap": MoMeta("AaaLdap", "aaaLdap", "ldap-ext", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'aaaLdapRoleGroup', u'ldapCACertificateManagement'], ["Get", "Set"]),
"AaaLdapRoleGroup": MoMeta("AaaLdapRoleGroup", "aaaLdapRoleGroup", "rolegroup-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'aaaLdap'], [], ["Get", "Set"]),
"AaaSession": MoMeta("AaaSession", "aaaSession", "term-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'aaaUserEp'], [], ["Get"]),
"AaaUser": MoMeta("AaaUser", "aaaUser", "user-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"AaaUserEp": MoMeta("AaaUserEp", "aaaUserEp", "user-ext", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'aaaSession', u'aaaUser', u'aaaUserPasswordExpiration', u'aaaUserPolicy', u'generateRandomPassword'], ["Get"]),
"AaaUserPasswordExpiration": MoMeta("AaaUserPasswordExpiration", "aaaUserPasswordExpiration", "password-expiration", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"AaaUserPolicy": MoMeta("AaaUserPolicy", "aaaUserPolicy", "policy", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
"ActivatePIDCatalog": MoMeta("ActivatePIDCatalog", "activatePIDCatalog", "activate-catalog", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get", "Set"]),
"AdaptorCfgBackup": MoMeta("AdaptorCfgBackup", "adaptorCfgBackup", "export-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorCfgImporter": MoMeta("AdaptorCfgImporter", "adaptorCfgImporter", "import-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorConnectorInfo": MoMeta("AdaptorConnectorInfo", "adaptorConnectorInfo", "connector-info", VersionMeta.Version303a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorExtEthIf'], [], ["Get"]),
"AdaptorEthCompQueueProfile": MoMeta("AdaptorEthCompQueueProfile", "adaptorEthCompQueueProfile", "eth-comp-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthGenProfile": MoMeta("AdaptorEthGenProfile", "adaptorEthGenProfile", "general", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthISCSIProfile": MoMeta("AdaptorEthISCSIProfile", "adaptorEthISCSIProfile", "ethiscsi", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Add", "Get", "Remove", "Set"]),
"AdaptorEthInterruptProfile": MoMeta("AdaptorEthInterruptProfile", "adaptorEthInterruptProfile", "eth-int", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthOffloadProfile": MoMeta("AdaptorEthOffloadProfile", "adaptorEthOffloadProfile", "eth-offload", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthRdmaProfile": MoMeta("AdaptorEthRdmaProfile", "adaptorEthRdmaProfile", "rdmaprofile", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthRecvQueueProfile": MoMeta("AdaptorEthRecvQueueProfile", "adaptorEthRecvQueueProfile", "eth-rcv-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorEthUSNICProfile": MoMeta("AdaptorEthUSNICProfile", "adaptorEthUSNICProfile", "ethusnic", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'adaptorHostEthIf'], [], ["Get", "Remove", "Set"]),
"AdaptorEthWorkQueueProfile": MoMeta("AdaptorEthWorkQueueProfile", "adaptorEthWorkQueueProfile", "eth-work-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorExtEthIf": MoMeta("AdaptorExtEthIf", "adaptorExtEthIf", "ext-eth-[port_id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [u'adaptorConnectorInfo', u'adaptorLinkTraining', u'adaptorPortProfiles'], ["Get", "Set"]),
"AdaptorExtIpV6RssHashProfile": MoMeta("AdaptorExtIpV6RssHashProfile", "adaptorExtIpV6RssHashProfile", "ext-ipv6-rss-hash", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorFcBootTable": MoMeta("AdaptorFcBootTable", "adaptorFcBootTable", "fcboot-[index]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Add", "Get", "Set"]),
"AdaptorFcCdbWorkQueueProfile": MoMeta("AdaptorFcCdbWorkQueueProfile", "adaptorFcCdbWorkQueueProfile", "fc-cdb-work-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcErrorRecoveryProfile": MoMeta("AdaptorFcErrorRecoveryProfile", "adaptorFcErrorRecoveryProfile", "fc-err-rec", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcGenProfile": MoMeta("AdaptorFcGenProfile", "adaptorFcGenProfile", "general", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcInterruptProfile": MoMeta("AdaptorFcInterruptProfile", "adaptorFcInterruptProfile", "fc-int", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPersistentBindings": MoMeta("AdaptorFcPersistentBindings", "adaptorFcPersistentBindings", "perbi-[index]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get"]),
"AdaptorFcPortFLogiProfile": MoMeta("AdaptorFcPortFLogiProfile", "adaptorFcPortFLogiProfile", "fc-port-flogi", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPortPLogiProfile": MoMeta("AdaptorFcPortPLogiProfile", "adaptorFcPortPLogiProfile", "fc-port-plogi", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcPortProfile": MoMeta("AdaptorFcPortProfile", "adaptorFcPortProfile", "fc-port", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcRecvQueueProfile": MoMeta("AdaptorFcRecvQueueProfile", "adaptorFcRecvQueueProfile", "fc-rcv-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorFcWorkQueueProfile": MoMeta("AdaptorFcWorkQueueProfile", "adaptorFcWorkQueueProfile", "fc-work-q", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostFcIf'], [], ["Get", "Set"]),
"AdaptorGenProfile": MoMeta("AdaptorGenProfile", "adaptorGenProfile", "general", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Get", "Set"]),
"AdaptorHostEthIf": MoMeta("AdaptorHostEthIf", "adaptorHostEthIf", "host-eth-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [], ["Add", "Get", "Remove", "Set"]),
"AdaptorHostFcIf": MoMeta("AdaptorHostFcIf", "adaptorHostFcIf", "host-fc-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit'], [u'adaptorFcPersistentBindings'], ["Add", "Get", "Remove", "Set"]),
"AdaptorIpV4RssHashProfile": MoMeta("AdaptorIpV4RssHashProfile", "adaptorIpV4RssHashProfile", "ipv4-rss-hash", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorIpV6RssHashProfile": MoMeta("AdaptorIpV6RssHashProfile", "adaptorIpV6RssHashProfile", "ipv6-rss-hash", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorLinkTraining": MoMeta("AdaptorLinkTraining", "adaptorLinkTraining", "link-training", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "user"], [u'adaptorExtEthIf'], [], ["Get", "Set"]),
"AdaptorPortProfiles": MoMeta("AdaptorPortProfiles", "adaptorPortProfiles", "port-profiles", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorExtEthIf'], [], ["Get"]),
"AdaptorRssProfile": MoMeta("AdaptorRssProfile", "adaptorRssProfile", "rss", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'adaptorHostEthIf'], [], ["Get", "Set"]),
"AdaptorUnit": MoMeta("AdaptorUnit", "adaptorUnit", "adaptor-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'adaptorCfgBackup', u'adaptorCfgImporter', u'adaptorExtEthIf', u'adaptorHostEthIf', u'adaptorHostFcIf', u'faultInst', u'mgmtController'], ["Get", "Set"]),
"BiosBOT": MoMeta("BiosBOT", "biosBOT", "bdgep", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosBootDevGrp', u'biosBootDevPrecision', u'biosBootMode'], ["Get"]),
"BiosBootDev": MoMeta("BiosBootDev", "biosBootDev", "bdv-[order]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBootDevGrp'], [], ["Get"]),
"BiosBootDevGrp": MoMeta("BiosBootDevGrp", "biosBootDevGrp", "bdg-[order]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBOT'], [u'biosBootDev'], ["Get"]),
"BiosBootDevPrecision": MoMeta("BiosBootDevPrecision", "biosBootDevPrecision", "bdvp-[order]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosBOT'], [], ["Get"]),
"BiosBootMode": MoMeta("BiosBootMode", "biosBootMode", "boot-mode", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "user"], [u'biosBOT'], [], ["Get"]),
"BiosPlatformDefaults": MoMeta("BiosPlatformDefaults", "biosPlatformDefaults", "bios-defaults", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosVfASPMSupport', u'biosVfAdjacentCacheLinePrefetch', u'biosVfAltitude', u'biosVfAssertNMIOnPERR', u'biosVfAssertNMIOnSERR', u'biosVfAutonumousCstateEnable', u'biosVfBootOptionRetry', u'biosVfBootPerformanceMode', u'biosVfCDNEnable', u'biosVfCDNSupport', u'biosVfCPUEnergyPerformance', u'biosVfCPUFrequencyFloor', u'biosVfCPUPerformance', u'biosVfCPUPowerManagement', u'biosVfCkeLowPolicy', u'biosVfCmciEnable', u'biosVfConsoleRedirection', u'biosVfCoreMultiProcessing', u'biosVfDCUPrefetch', u'biosVfDRAMClockThrottling', u'biosVfDemandScrub', u'biosVfDirectCacheAccess', u'biosVfDramRefreshRate', u'biosVfEnhancedIntelSpeedStepTech', u'biosVfExecuteDisableBit', u'biosVfExtendedAPIC', u'biosVfFRB2Enable', u'biosVfHWPMEnable', u'biosVfHardwarePrefetch', u'biosVfIOHResource', u'biosVfIntelHyperThreadingTech', u'biosVfIntelTurboBoostTech', u'biosVfIntelVTForDirectedIO', u'biosVfIntelVirtualizationTechnology', u'biosVfLOMPortOptionROM', u'biosVfLegacyUSBSupport', u'biosVfLvDIMMSupport', u'biosVfMMCFGBase', u'biosVfMemoryInterleave', u'biosVfMemoryMappedIOAbove4GB', u'biosVfMirroringMode', u'biosVfNUMAOptimized', u'biosVfOSBootWatchdogTimer', u'biosVfOSBootWatchdogTimerPolicy', u'biosVfOSBootWatchdogTimerTimeout', u'biosVfOnboardNIC', u'biosVfOnboardStorage', u'biosVfOnboardStorageSWStack', u'biosVfOutOfBandMgmtPort', u'biosVfPCIOptionROMs', u'biosVfPCISlotOptionROMEnable', u'biosVfPOSTErrorPause', u'biosVfPStateCoordType', u'biosVfPackageCStateLimit', u'biosVfPatrolScrub', u'biosVfPatrolScrubDuration', u'biosVfPchUsb30Mode', u'biosVfPciRomClp', u'biosVfPowerOnPasswordSupport', u'biosVfProcessorC1E', u'biosVfProcessorC3Report', u'biosVfProcessorC6Report', u'biosVfProcessorCState', u'biosVfPwrPerfTuning', u'biosVfQPIConfig', u'biosVfQpiSnoopMode', u'biosVfSataModeSelect', 
u'biosVfSelectMemoryRASConfiguration', u'biosVfSerialPortAEnable', u'biosVfSparingMode', u'biosVfSrIov', u'biosVfTPMSupport', u'biosVfUCSMBootOrderRuleControl', u'biosVfUSBBootConfig', u'biosVfUSBEmulation', u'biosVfUSBPortsConfig', u'biosVfUsbXhciSupport', u'biosVfVgaPriority', u'biosVfWorkLoadConfig'], ["Get"]),
"BiosProfile": MoMeta("BiosProfile", "biosProfile", "bios-profile-[name]", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosProfileManagement'], [u'biosProfileToken'], ["Get", "Set"]),
"BiosProfileManagement": MoMeta("BiosProfileManagement", "biosProfileManagement", "profile-mgmt", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [u'biosProfile', u'uploadBiosProfile'], ["Get", "Set"]),
"BiosProfileToken": MoMeta("BiosProfileToken", "biosProfileToken", "token-[name]", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosProfile'], [], ["Get"]),
"BiosSettings": MoMeta("BiosSettings", "biosSettings", "bios-settings", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit'], [], ["Get"]),
"BiosUnit": MoMeta("BiosUnit", "biosUnit", "bios", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'biosBOT', u'biosPlatformDefaults', u'biosProfileManagement', u'biosSettings', u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable'], ["Get", "Set"]),
"BiosVfASPMSupport": MoMeta("BiosVfASPMSupport", "biosVfASPMSupport", "ASPM-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAdjacentCacheLinePrefetch": MoMeta("BiosVfAdjacentCacheLinePrefetch", "biosVfAdjacentCacheLinePrefetch", "Adjacent-Cache-Line-Prefetch", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAltitude": MoMeta("BiosVfAltitude", "biosVfAltitude", "Altitude-Param", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAssertNMIOnPERR": MoMeta("BiosVfAssertNMIOnPERR", "biosVfAssertNMIOnPERR", "Assert-NMI-on-PERR", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAssertNMIOnSERR": MoMeta("BiosVfAssertNMIOnSERR", "biosVfAssertNMIOnSERR", "Assert-NMI-on-SERR", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfAutonumousCstateEnable": MoMeta("BiosVfAutonumousCstateEnable", "biosVfAutonumousCstateEnable", "Autonumous-Cstate-Enable", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootOptionRetry": MoMeta("BiosVfBootOptionRetry", "biosVfBootOptionRetry", "Boot-option-retry", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfBootPerformanceMode": MoMeta("BiosVfBootPerformanceMode", "biosVfBootPerformanceMode", "Boot-Performance-Mode", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCDNEnable": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCDNSupport": MoMeta("BiosVfCDNSupport", "biosVfCDNSupport", "CDN-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUEnergyPerformance": MoMeta("BiosVfCPUEnergyPerformance", "biosVfCPUEnergyPerformance", "CPU-EngPerfBias", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUFrequencyFloor": MoMeta("BiosVfCPUFrequencyFloor", "biosVfCPUFrequencyFloor", "CPU-FreqFloor", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUPerformance": MoMeta("BiosVfCPUPerformance", "biosVfCPUPerformance", "CPU-Performance", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCPUPowerManagement": MoMeta("BiosVfCPUPowerManagement", "biosVfCPUPowerManagement", "CPU-PowerManagement", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCkeLowPolicy": MoMeta("BiosVfCkeLowPolicy", "biosVfCkeLowPolicy", "Cke-Low-Policy", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCmciEnable": MoMeta("BiosVfCmciEnable", "biosVfCmciEnable", "Cmci-Enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfConsoleRedirection": MoMeta("BiosVfConsoleRedirection", "biosVfConsoleRedirection", "Console-redirection", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfCoreMultiProcessing": MoMeta("BiosVfCoreMultiProcessing", "biosVfCoreMultiProcessing", "Core-MultiProcessing", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDCUPrefetch": MoMeta("BiosVfDCUPrefetch", "biosVfDCUPrefetch", "DCU-Prefetch", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDRAMClockThrottling": MoMeta("BiosVfDRAMClockThrottling", "biosVfDRAMClockThrottling", "DRAM-Clock-Throttling", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDemandScrub": MoMeta("BiosVfDemandScrub", "biosVfDemandScrub", "Demand-Scrub-Param", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDirectCacheAccess": MoMeta("BiosVfDirectCacheAccess", "biosVfDirectCacheAccess", "Direct-Cache-Access", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfDramRefreshRate": MoMeta("BiosVfDramRefreshRate", "biosVfDramRefreshRate", "dram-refresh-rate", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfEnhancedIntelSpeedStepTech": MoMeta("BiosVfEnhancedIntelSpeedStepTech", "biosVfEnhancedIntelSpeedStepTech", "Enhanced-Intel-SpeedStep-Tech", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfExecuteDisableBit": MoMeta("BiosVfExecuteDisableBit", "biosVfExecuteDisableBit", "Execute-Disable-Bit", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfExtendedAPIC": MoMeta("BiosVfExtendedAPIC", "biosVfExtendedAPIC", "Extended-APIC", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfFRB2Enable": MoMeta("BiosVfFRB2Enable", "biosVfFRB2Enable", "FRB2-Enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfHWPMEnable": MoMeta("BiosVfHWPMEnable", "biosVfHWPMEnable", "HWPM-Enable", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfHardwarePrefetch": MoMeta("BiosVfHardwarePrefetch", "biosVfHardwarePrefetch", "Hardware-Prefetch", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIOHResource": MoMeta("BiosVfIOHResource", "biosVfIOHResource", "ioh-resource", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelHyperThreadingTech": MoMeta("BiosVfIntelHyperThreadingTech", "biosVfIntelHyperThreadingTech", "Intel-HyperThreading-Tech", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelTurboBoostTech": MoMeta("BiosVfIntelTurboBoostTech", "biosVfIntelTurboBoostTech", "Intel-Turbo-Boost-Tech", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelVTForDirectedIO": MoMeta("BiosVfIntelVTForDirectedIO", "biosVfIntelVTForDirectedIO", "Intel-VT-for-directed-IO", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfIntelVirtualizationTechnology": MoMeta("BiosVfIntelVirtualizationTechnology", "biosVfIntelVirtualizationTechnology", "Intel-Virtualization-Technology", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLOMPortOptionROM": MoMeta("BiosVfLOMPortOptionROM", "biosVfLOMPortOptionROM", "LOMPort-OptionROM", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLegacyUSBSupport": MoMeta("BiosVfLegacyUSBSupport", "biosVfLegacyUSBSupport", "LegacyUSB-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfLvDIMMSupport": MoMeta("BiosVfLvDIMMSupport", "biosVfLvDIMMSupport", "LvDIMM-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMMCFGBase": MoMeta("BiosVfMMCFGBase", "biosVfMMCFGBase", "MMCFG-Base", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMemoryInterleave": MoMeta("BiosVfMemoryInterleave", "biosVfMemoryInterleave", "Memory-Interleave", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMemoryMappedIOAbove4GB": MoMeta("BiosVfMemoryMappedIOAbove4GB", "biosVfMemoryMappedIOAbove4GB", "Memory-mapped-IO-above-4GB", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfMirroringMode": MoMeta("BiosVfMirroringMode", "biosVfMirroringMode", "Mirroring-Mode", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfNUMAOptimized": MoMeta("BiosVfNUMAOptimized", "biosVfNUMAOptimized", "NUMA-optimized", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimer": MoMeta("BiosVfOSBootWatchdogTimer", "biosVfOSBootWatchdogTimer", "OS-Boot-Watchdog-Timer-Param", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimerPolicy": MoMeta("BiosVfOSBootWatchdogTimerPolicy", "biosVfOSBootWatchdogTimerPolicy", "OS-Boot-Watchdog-Timer-Policy", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOSBootWatchdogTimerTimeout": MoMeta("BiosVfOSBootWatchdogTimerTimeout", "biosVfOSBootWatchdogTimerTimeout", "OS-Boot-Watchdog-Timer-Time-Out", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardNIC": MoMeta("BiosVfOnboardNIC", "biosVfOnboardNIC", "Onboard-NIC", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardStorage": MoMeta("BiosVfOnboardStorage", "biosVfOnboardStorage", "Onboard-Storage", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOnboardStorageSWStack": MoMeta("BiosVfOnboardStorageSWStack", "biosVfOnboardStorageSWStack", "Onboard-SCU-Storage-SWStack", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfOutOfBandMgmtPort": MoMeta("BiosVfOutOfBandMgmtPort", "biosVfOutOfBandMgmtPort", "OoB-MgmtPort", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPCIOptionROMs": MoMeta("BiosVfPCIOptionROMs", "biosVfPCIOptionROMs", "PCI-OptionROMs", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPCISlotOptionROMEnable": MoMeta("BiosVfPCISlotOptionROMEnable", "biosVfPCISlotOptionROMEnable", "PCI-Slot-OptionROM-Enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPOSTErrorPause": MoMeta("BiosVfPOSTErrorPause", "biosVfPOSTErrorPause", "POST-error-pause", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPStateCoordType": MoMeta("BiosVfPStateCoordType", "biosVfPStateCoordType", "p-state-coord", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPackageCStateLimit": MoMeta("BiosVfPackageCStateLimit", "biosVfPackageCStateLimit", "Package-CState-Limit", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPatrolScrub": MoMeta("BiosVfPatrolScrub", "biosVfPatrolScrub", "Patrol-Scrub-Param", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPatrolScrubDuration": MoMeta("BiosVfPatrolScrubDuration", "biosVfPatrolScrubDuration", "Patrol-Scrub-Duration", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPchUsb30Mode": MoMeta("BiosVfPchUsb30Mode", "biosVfPchUsb30Mode", "PchUsb30-Mode", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPciRomClp": MoMeta("BiosVfPciRomClp", "biosVfPciRomClp", "pci-rom-clp", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPowerOnPasswordSupport": MoMeta("BiosVfPowerOnPasswordSupport", "biosVfPowerOnPasswordSupport", "POP-Support", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC1E": MoMeta("BiosVfProcessorC1E", "biosVfProcessorC1E", "Processor-C1E", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC3Report": MoMeta("BiosVfProcessorC3Report", "biosVfProcessorC3Report", "Processor-C3-Report", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorC6Report": MoMeta("BiosVfProcessorC6Report", "biosVfProcessorC6Report", "Processor-C6-Report", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfProcessorCState": MoMeta("BiosVfProcessorCState", "biosVfProcessorCState", "Processor-C-State", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfPwrPerfTuning": MoMeta("BiosVfPwrPerfTuning", "biosVfPwrPerfTuning", "Pwr-Perf-Tuning", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfQPIConfig": MoMeta("BiosVfQPIConfig", "biosVfQPIConfig", "QPI-Config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfQpiSnoopMode": MoMeta("BiosVfQpiSnoopMode", "biosVfQpiSnoopMode", "QPI-Snoop-Mode", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfResumeOnACPowerLoss": MoMeta("BiosVfResumeOnACPowerLoss", "biosVfResumeOnACPowerLoss", "Resume-on-AC-power-loss", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get", "Set"]),
"BiosVfSataModeSelect": MoMeta("BiosVfSataModeSelect", "biosVfSataModeSelect", "Pch-Sata-Mode", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSelectMemoryRASConfiguration": MoMeta("BiosVfSelectMemoryRASConfiguration", "biosVfSelectMemoryRASConfiguration", "SelectMemory-RAS-configuration", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSerialPortAEnable": MoMeta("BiosVfSerialPortAEnable", "biosVfSerialPortAEnable", "Serial-port-A-enable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSparingMode": MoMeta("BiosVfSparingMode", "biosVfSparingMode", "Sparing-Mode", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfSrIov": MoMeta("BiosVfSrIov", "biosVfSrIov", "sr-iov", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfTPMSupport": MoMeta("BiosVfTPMSupport", "biosVfTPMSupport", "TPM-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUCSMBootOrderRuleControl": MoMeta("BiosVfUCSMBootOrderRuleControl", "biosVfUCSMBootOrderRuleControl", "Boot-Order-Rules", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBBootConfig": MoMeta("BiosVfUSBBootConfig", "biosVfUSBBootConfig", "USB-Boot-Config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBEmulation": MoMeta("BiosVfUSBEmulation", "biosVfUSBEmulation", "USBEmulation-Support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUSBPortsConfig": MoMeta("BiosVfUSBPortsConfig", "biosVfUSBPortsConfig", "USB-Ports-Config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfUsbXhciSupport": MoMeta("BiosVfUsbXhciSupport", "biosVfUsbXhciSupport", "UsbXhci-Support", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfVgaPriority": MoMeta("BiosVfVgaPriority", "biosVfVgaPriority", "VgaPriority", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"BiosVfWorkLoadConfig": MoMeta("BiosVfWorkLoadConfig", "biosVfWorkLoadConfig", "work-load-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"CertificateManagement": MoMeta("CertificateManagement", "certificateManagement", "cert-mgmt", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'currentCertificate', u'generateCertificateSigningRequest', u'uploadCertificate'], ["Get"]),
"CommHttp": MoMeta("CommHttp", "commHttp", "http-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommHttps": MoMeta("CommHttps", "commHttps", "https-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommIpmiLan": MoMeta("CommIpmiLan", "commIpmiLan", "ipmi-lan-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcRack'], [], ["Get", "Set"]),
"CommKvm": MoMeta("CommKvm", "commKvm", "kvm-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcRack'], [], ["Get", "Set"]),
"CommMailAlert": MoMeta("CommMailAlert", "commMailAlert", "mail-alert-svc", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'mailRecipient'], ["Get", "Set"]),
"CommNtpProvider": MoMeta("CommNtpProvider", "commNtpProvider", "ntp-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommRedfish": MoMeta("CommRedfish", "commRedfish", "redfish-svc", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommSavedVMediaMap": MoMeta("CommSavedVMediaMap", "commSavedVMediaMap", "saved-vmmap-[volume_name]", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commVMedia'], [], ["Get", "Remove", "Set"]),
"CommSnmp": MoMeta("CommSnmp", "commSnmp", "snmp-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'commSnmpTrap', u'commSnmpUser'], ["Get", "Set"]),
"CommSnmpTrap": MoMeta("CommSnmpTrap", "commSnmpTrap", "snmp-trap-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSnmp'], [], ["Get", "Set"]),
"CommSnmpUser": MoMeta("CommSnmpUser", "commSnmpUser", "snmpv3-user-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSnmp'], [], ["Get", "Set"]),
"CommSsh": MoMeta("CommSsh", "commSsh", "ssh-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [], ["Get", "Set"]),
"CommSvcEp": MoMeta("CommSvcEp", "commSvcEp", "svc-ext", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'commHttp', u'commHttps', u'commMailAlert', u'commNtpProvider', u'commRedfish', u'commSnmp', u'commSsh', u'commSyslog'], ["Get"]),
"CommSyslog": MoMeta("CommSyslog", "commSyslog", "syslog", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcEp'], [u'commSyslogClient'], ["Get", "Set"]),
"CommSyslogClient": MoMeta("CommSyslogClient", "commSyslogClient", "client-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSyslog'], [], ["Get"]),
"CommVMedia": MoMeta("CommVMedia", "commVMedia", "vmedia-svc", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commSvcRack'], [u'commSavedVMediaMap', u'commVMediaMap'], ["Get", "Set"]),
"CommVMediaMap": MoMeta("CommVMediaMap", "commVMediaMap", "vmmap-[volume_name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commVMedia'], [], ["Add", "Get"]),
"ComputeBoard": MoMeta("ComputeBoard", "computeBoard", "board", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'biosVfResumeOnACPowerLoss', u'computeMbPowerStats', u'computeServerNodeMbTempStats', u'equipmentTpm', u'faultInst', u'memoryArray', u'pidCatalog', u'processorUnit', u'storageController', u'storageControllerNVMe', u'storageFlexFlashController', u'storageLocalDiskSlotEp'], ["Get"]),
"ComputeMbPowerStats": MoMeta("ComputeMbPowerStats", "computeMbPowerStats", "power-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"CurrentCertificate": MoMeta("CurrentCertificate", "currentCertificate", "curr-cert", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"DownloadClientCertificate": MoMeta("DownloadClientCertificate", "downloadClientCertificate", "kmip-client-cert-download", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"DownloadClientPrivateKey": MoMeta("DownloadClientPrivateKey", "downloadClientPrivateKey", "kmip-private-key-download", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"DownloadLdapCACertificate": MoMeta("DownloadLdapCACertificate", "downloadLdapCACertificate", "ldap-ca-cert-download", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'ldapCACertificateManagement'], [], ["Get"]),
"DownloadRootCACertificate": MoMeta("DownloadRootCACertificate", "downloadRootCACertificate", "kmip-ca-cert-download", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"EquipmentFan": MoMeta("EquipmentFan", "equipmentFan", "fan-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentFanModule'], [u'faultInst'], ["Get"]),
"EquipmentFanModule": MoMeta("EquipmentFanModule", "equipmentFanModule", "fan-module-[tray]-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode', u'equipmentChassis'], [u'equipmentFan'], ["Get"]),
"EquipmentIndicatorLed": MoMeta("EquipmentIndicatorLed", "equipmentIndicatorLed", "indicator-led-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode', u'equipmentChassis'], [], ["Get"]),
"EquipmentLocatorLed": MoMeta("EquipmentLocatorLed", "equipmentLocatorLed", "locator-led", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"EquipmentPsu": MoMeta("EquipmentPsu", "equipmentPsu", "psu-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode', u'equipmentChassis'], [u'faultInst'], ["Get"]),
"EquipmentTpm": MoMeta("EquipmentTpm", "equipmentTpm", "tpm", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["read-only"], [u'computeBoard'], [], ["Get"]),
"Error": MoMeta("Error", "error", "", VersionMeta.Version2013e, "OutputOnly", 0x1, [], [""], [], [], [None]),
"EventManagement": MoMeta("EventManagement", "eventManagement", "event-management", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'platformEventFilters'], ["Get", "Set"]),
"ExportClientCertificate": MoMeta("ExportClientCertificate", "exportClientCertificate", "kmip-client-cert-export", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"ExportClientPrivateKey": MoMeta("ExportClientPrivateKey", "exportClientPrivateKey", "kmip-private-key-export", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"ExportLdapCACertificate": MoMeta("ExportLdapCACertificate", "exportLdapCACertificate", "ldap-ca-cert-export", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'ldapCACertificateManagement'], [], ["Get"]),
"ExportRootCACertificate": MoMeta("ExportRootCACertificate", "exportRootCACertificate", "kmip-ca-cert-export", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], [None]),
"FaultInst": MoMeta("FaultInst", "faultInst", "fault-[code]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeBoard', u'computeServerNode', u'equipmentChassis', u'equipmentFan', u'equipmentPsu', u'memoryArray', u'memoryUnit', u'pciEquipSlot', u'powerBudget', u'processorUnit', u'storageController', u'storageEnclosureDiskSlotEp', u'storageFlexFlashController', u'storageFlexFlashPhysicalDrive', u'storageFlexFlashVirtualDrive', u'storageLocalDisk', u'storageLocalDiskEp', u'storageRaidBattery', u'storageSasExpander', u'storageVirtualDrive', u'sysdebugMEpLog'], [], ["Get"]),
"FirmwareBootDefinition": MoMeta("FirmwareBootDefinition", "firmwareBootDefinition", "fw-boot-def", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController', u'storageController'], [u'firmwareBootUnit'], ["Get"]),
"FirmwareBootUnit": MoMeta("FirmwareBootUnit", "firmwareBootUnit", "bootunit-[type]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'firmwareBootDefinition'], [], ["Get", "Set"]),
"FirmwareRunning": MoMeta("FirmwareRunning", "firmwareRunning", "fw-[deployment]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController', u'storageController', u'storageControllerNVMe'], [], ["Get"]),
"FirmwareUpdatable": MoMeta("FirmwareUpdatable", "firmwareUpdatable", "fw-updatable", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosUnit', u'mgmtController'], [], ["Get"]),
"GenerateCertificateSigningRequest": MoMeta("GenerateCertificateSigningRequest", "generateCertificateSigningRequest", "gen-csr-req", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"GenerateRandomPassword": MoMeta("GenerateRandomPassword", "generateRandomPassword", "policy", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get"]),
"GeneratedStorageControllerKeyId": MoMeta("GeneratedStorageControllerKeyId", "generatedStorageControllerKeyId", "gen-key-id", VersionMeta.Version303a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"HuuController": MoMeta("HuuController", "huuController", "huu", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'huuFirmwareCatalog', u'huuFirmwareRunning', u'huuFirmwareUpdateCancel', u'huuFirmwareUpdater'], ["Get"]),
"HuuFirmwareCatalog": MoMeta("HuuFirmwareCatalog", "huuFirmwareCatalog", "firmwareCatalog", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [u'huuFirmwareCatalogComponent'], ["Get"]),
"HuuFirmwareCatalogComponent": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"]),
"HuuFirmwareComponent": MoMeta("HuuFirmwareComponent", "huuFirmwareComponent", "component-[component]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareRunning'], [], ["Get"]),
"HuuFirmwareRunning": MoMeta("HuuFirmwareRunning", "huuFirmwareRunning", "currentFirmware", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [u'huuFirmwareComponent'], ["Get"]),
"HuuFirmwareUpdateCancel": MoMeta("HuuFirmwareUpdateCancel", "huuFirmwareUpdateCancel", "firmwareUpdateCancel", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [], ["Get", "Set"]),
"HuuFirmwareUpdateStatus": MoMeta("HuuFirmwareUpdateStatus", "huuFirmwareUpdateStatus", "updateStatus", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareUpdater'], [u'huuUpdateComponentStatus'], ["Get"]),
"HuuFirmwareUpdater": MoMeta("HuuFirmwareUpdater", "huuFirmwareUpdater", "firmwareUpdater", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'huuController'], [], ["Get"]),
"HuuUpdateComponentStatus": MoMeta("HuuUpdateComponentStatus", "huuUpdateComponentStatus", "component-[component]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'huuFirmwareUpdateStatus'], [], ["Get"]),
"IodController": MoMeta("IodController", "iodController", "iod", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["read-only"], [u'computeServerNode'], [u'iodSnapshotCancel', u'iodSnapshotStart', u'iodSnapshotStatus'], ["Get"]),
"IodSnapshotCancel": MoMeta("IodSnapshotCancel", "iodSnapshotCancel", "snapshotCancel", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'iodController'], [], [None]),
"IodSnapshotStart": MoMeta("IodSnapshotStart", "iodSnapshotStart", "snapshotStart", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'iodController'], [], [None]),
"IodSnapshotStatus": MoMeta("IodSnapshotStatus", "iodSnapshotStatus", "snapshotStatus", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["read-only"], [u'iodController'], [], ["Get"]),
"IpBlocking": MoMeta("IpBlocking", "ipBlocking", "ip-block", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtIf'], [], ["Get", "Set"]),
"IpFiltering": MoMeta("IpFiltering", "ipFiltering", "ip-filter", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtIf'], [], ["Get", "Set"]),
"KmipManagement": MoMeta("KmipManagement", "kmipManagement", "kmip-mgmt", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'downloadClientCertificate', u'downloadClientPrivateKey', u'downloadRootCACertificate', u'exportClientCertificate', u'exportClientPrivateKey', u'exportRootCACertificate', u'kmipServer', u'kmipServerLogin'], ["Get", "Set"]),
"KmipServer": MoMeta("KmipServer", "kmipServer", "kmip-server-[id]", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], ["Get", "Set"]),
"KmipServerLogin": MoMeta("KmipServerLogin", "kmipServerLogin", "kmip-login", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'kmipManagement'], [], ["Get", "Set"]),
"LdapCACertificate": MoMeta("LdapCACertificate", "ldapCACertificate", "ldap-ca-cert", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'ldapCACertificateManagement'], [], ["Get", "Set"]),
"LdapCACertificateManagement": MoMeta("LdapCACertificateManagement", "ldapCACertificateManagement", "ldap-ca-cert-mgmt", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'aaaLdap'], [u'downloadLdapCACertificate', u'exportLdapCACertificate', u'ldapCACertificate'], ["Get", "Set"]),
"LsbootBootSecurity": MoMeta("LsbootBootSecurity", "lsbootBootSecurity", "boot-security", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "user"], [u'lsbootDef'], [], ["Get", "Set"]),
"LsbootDef": MoMeta("LsbootDef", "lsbootDef", "boot-policy", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'lsbootBootSecurity', u'lsbootEfi', u'lsbootLan', u'lsbootStorage', u'lsbootVirtualMedia'], ["Get", "Set"]),
"LsbootDevPrecision": MoMeta("LsbootDevPrecision", "lsbootDevPrecision", "boot-precision", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'lsbootHdd', u'lsbootIscsi', u'lsbootNVMe', u'lsbootPchStorage', u'lsbootPxe', u'lsbootSan', u'lsbootSd', u'lsbootUefiShell', u'lsbootUsb', u'lsbootVMedia'], ["Get", "Set"]),
"LsbootEfi": MoMeta("LsbootEfi", "lsbootEfi", "efi-read-only", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootHdd": MoMeta("LsbootHdd", "lsbootHdd", "hdd-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootIscsi": MoMeta("LsbootIscsi", "lsbootIscsi", "iscsi-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootLan": MoMeta("LsbootLan", "lsbootLan", "lan-read-only", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootLocalStorage": MoMeta("LsbootLocalStorage", "lsbootLocalStorage", "local-storage", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootStorage'], [], ["Get"]),
"LsbootNVMe": MoMeta("LsbootNVMe", "lsbootNVMe", "nvme-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Get", "Set"]),
"LsbootPchStorage": MoMeta("LsbootPchStorage", "lsbootPchStorage", "pchstorage-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootPxe": MoMeta("LsbootPxe", "lsbootPxe", "pxe-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootSan": MoMeta("LsbootSan", "lsbootSan", "san-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootSd": MoMeta("LsbootSd", "lsbootSd", "sd-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootStorage": MoMeta("LsbootStorage", "lsbootStorage", "storage-read-write", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootUefiShell": MoMeta("LsbootUefiShell", "lsbootUefiShell", "uefishell-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootUsb": MoMeta("LsbootUsb", "lsbootUsb", "usb-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootVMedia": MoMeta("LsbootVMedia", "lsbootVMedia", "vm-[name]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDevPrecision'], [], ["Add", "Get", "Remove", "Set"]),
"LsbootVirtualMedia": MoMeta("LsbootVirtualMedia", "lsbootVirtualMedia", "vm-[access]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'lsbootDef'], [], ["Add", "Get"]),
"MailRecipient": MoMeta("MailRecipient", "mailRecipient", "mail-recipient-[id]", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'commMailAlert'], [], ["Get", "Remove", "Set"]),
"MemoryArray": MoMeta("MemoryArray", "memoryArray", "memarray-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'memoryUnit'], ["Get", "Set"]),
"MemoryUnit": MoMeta("MemoryUnit", "memoryUnit", "mem-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'memoryArray'], [u'faultInst', u'memoryUnitEnvStats'], ["Get"]),
"MemoryUnitEnvStats": MoMeta("MemoryUnitEnvStats", "memoryUnitEnvStats", "dimm-env-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'memoryUnit'], [], ["Get"]),
"MgmtBackup": MoMeta("MgmtBackup", "mgmtBackup", "export-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], [None]),
"MgmtController": MoMeta("MgmtController", "mgmtController", "mgmt", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'adaptorUnit', u'computeServerNode', u'equipmentSharedIOModule', u'equipmentSystemIOController', u'storageSasExpander'], [u'firmwareBootDefinition', u'firmwareRunning', u'firmwareUpdatable', u'mgmtIf', u'sysdebugMEpLog'], ["Get"]),
"MgmtIf": MoMeta("MgmtIf", "mgmtIf", "if-1", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis', u'mgmtController'], [u'ipBlocking', u'ipFiltering'], ["Get", "Set"]),
"MgmtImporter": MoMeta("MgmtImporter", "mgmtImporter", "import-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], [None]),
"MgmtInventory": MoMeta("MgmtInventory", "mgmtInventory", "inventory", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], [None]),
"NetworkAdapterEthIf": MoMeta("NetworkAdapterEthIf", "networkAdapterEthIf", "eth-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'networkAdapterUnit'], [], ["Get"]),
"NetworkAdapterUnit": MoMeta("NetworkAdapterUnit", "networkAdapterUnit", "network-adapter-[slot]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'networkAdapterEthIf'], ["Get"]),
"OneTimeBootDevice": MoMeta("OneTimeBootDevice", "oneTimeBootDevice", "boot-one-time", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"OneTimePrecisionBootDevice": MoMeta("OneTimePrecisionBootDevice", "oneTimePrecisionBootDevice", "one-time-precision-boot", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"OsiCancel": MoMeta("OsiCancel", "osiCancel", "osiCancel", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'osiController'], [], [None]),
"OsiController": MoMeta("OsiController", "osiController", "osi", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["read-only"], [u'computeServerNode'], [u'osiCancel', u'osiStart', u'osiStatus'], ["Get"]),
"OsiStart": MoMeta("OsiStart", "osiStart", "osiStart", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin"], [u'osiController'], [], ["Get"]),
"OsiStatus": MoMeta("OsiStatus", "osiStatus", "osiStatus", VersionMeta.Version301c, "OutputOnly", 0x1, [], ["read-only"], [u'osiController'], [], ["Get"]),
"PciEquipSlot": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'faultInst'], ["Get"]),
"PidCatalog": MoMeta("PidCatalog", "pidCatalog", "pid", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'activatePIDCatalog', u'pidCatalogCpu', u'pidCatalogDimm', u'pidCatalogHdd', u'pidCatalogPCIAdapter'], ["Get"]),
"PidCatalogCpu": MoMeta("PidCatalogCpu", "pidCatalogCpu", "pid-cpu-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogDimm": MoMeta("PidCatalogDimm", "pidCatalogDimm", "pid-dimm-[name]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogHdd": MoMeta("PidCatalogHdd", "pidCatalogHdd", "pid-hdd-[disk]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PidCatalogPCIAdapter": MoMeta("PidCatalogPCIAdapter", "pidCatalogPCIAdapter", "pid-pciadapter-[slot]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'pidCatalog'], [], ["Get"]),
"PlatformEventFilters": MoMeta("PlatformEventFilters", "platformEventFilters", "pef-[id]", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'eventManagement'], [], ["Get", "Set"]),
"PowerBudget": MoMeta("PowerBudget", "powerBudget", "budget", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'customPowerProfile', u'faultInst', u'thermalPowerProfile'], ["Get", "Set"]),
"PowerMonitor": MoMeta("PowerMonitor", "powerMonitor", "pwrmonitor-[domain]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get"]),
"ProcessorEnvStats": MoMeta("ProcessorEnvStats", "processorEnvStats", "env-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'processorUnit'], [], ["Get"]),
"ProcessorUnit": MoMeta("ProcessorUnit", "processorUnit", "cpu-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'processorEnvStats'], ["Get"]),
"SelfEncryptStorageController": MoMeta("SelfEncryptStorageController", "selfEncryptStorageController", "ctr-self-encrypt", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get", "Set"]),
"ServerUtilization": MoMeta("ServerUtilization", "serverUtilization", "utilization", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get"]),
"SolIf": MoMeta("SolIf", "solIf", "sol-if", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"StorageController": MoMeta("StorageController", "storageController", "storage-[type]-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'firmwareBootDefinition', u'firmwareRunning', u'generatedStorageControllerKeyId', u'selfEncryptStorageController', u'storageControllerHealth', u'storageControllerProps', u'storageControllerSettings', u'storageLocalDisk', u'storageLocalDiskEp', u'storageLocalDiskProps', u'storageRaidBattery', u'storageVirtualDrive', u'storageVirtualDriveCreatorUsingUnusedPhysicalDrive', u'storageVirtualDriveCreatorUsingVirtualDriveGroup', u'suggestedStorageControllerSecurityKey'], ["Get", "Set"]),
"StorageControllerHealth": MoMeta("StorageControllerHealth", "storageControllerHealth", "controller-health", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageControllerNVMe": MoMeta("StorageControllerNVMe", "storageControllerNVMe", "storage-NVMe-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'firmwareRunning'], ["Get"]),
"StorageControllerProps": MoMeta("StorageControllerProps", "storageControllerProps", "controller-props", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageControllerSettings": MoMeta("StorageControllerSettings", "storageControllerSettings", "controller-settings", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"StorageFlexFlashController": MoMeta("StorageFlexFlashController", "storageFlexFlashController", "storage-flexflash-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [u'faultInst', u'storageFlexFlashControllerProps', u'storageFlexFlashOperationalProfile', u'storageFlexFlashPhysicalDrive', u'storageFlexFlashVirtualDrive', u'storageFlexFlashVirtualDriveImageMap'], ["Get", "Set"]),
"StorageFlexFlashControllerProps": MoMeta("StorageFlexFlashControllerProps", "storageFlexFlashControllerProps", "flexflashcontroller-props", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get"]),
"StorageFlexFlashOperationalProfile": MoMeta("StorageFlexFlashOperationalProfile", "storageFlexFlashOperationalProfile", "oper-profile", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get", "Set"]),
"StorageFlexFlashPhysicalDrive": MoMeta("StorageFlexFlashPhysicalDrive", "storageFlexFlashPhysicalDrive", "card-[physical_drive_id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get"]),
"StorageFlexFlashVirtualDrive": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"]),
"StorageFlexFlashVirtualDriveImageMap": MoMeta("StorageFlexFlashVirtualDriveImageMap", "storageFlexFlashVirtualDriveImageMap", "vdrive-map-[virtual_drive]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [], ["Get", "Set"]),
"StorageLocalDisk": MoMeta("StorageLocalDisk", "storageLocalDisk", "pd-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageLocalDiskProps', u'storageOperation'], ["Get", "Set"]),
"StorageLocalDiskProps": MoMeta("StorageLocalDiskProps", "storageLocalDiskProps", "general-props", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController', u'storageLocalDisk'], [], ["Get"]),
"StorageLocalDiskSlotEp": MoMeta("StorageLocalDiskSlotEp", "storageLocalDiskSlotEp", "disk-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"StorageLocalDiskUsage": MoMeta("StorageLocalDiskUsage", "storageLocalDiskUsage", "pd-[physical_drive]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDrive'], [], ["Get"]),
"StorageOperation": MoMeta("StorageOperation", "storageOperation", "storage-operation", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageLocalDisk', u'storageRaidBattery', u'storageVirtualDrive'], [], ["Get"]),
"StorageRaidBattery": MoMeta("StorageRaidBattery", "storageRaidBattery", "raid-battery", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageOperation'], ["Get", "Set"]),
"StorageSasExpander": MoMeta("StorageSasExpander", "storageSasExpander", "sas-expander-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["read-only"], [u'equipmentChassis'], [u'faultInst', u'mgmtController', u'storageSasUplink'], ["Get"]),
"StorageUnusedLocalDisk": MoMeta("StorageUnusedLocalDisk", "storageUnusedLocalDisk", "pd-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDriveCreatorUsingUnusedPhysicalDrive'], [], ["Get"]),
"StorageVirtualDrive": MoMeta("StorageVirtualDrive", "storageVirtualDrive", "vd-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst', u'storageLocalDiskUsage', u'storageOperation'], ["Get", "Remove", "Set"]),
"StorageVirtualDriveCreatorUsingUnusedPhysicalDrive": MoMeta("StorageVirtualDriveCreatorUsingUnusedPhysicalDrive", "storageVirtualDriveCreatorUsingUnusedPhysicalDrive", "virtual-drive-create", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'storageController'], [u'storageUnusedLocalDisk'], ["Get", "Set"]),
"StorageVirtualDriveCreatorUsingVirtualDriveGroup": MoMeta("StorageVirtualDriveCreatorUsingVirtualDriveGroup", "storageVirtualDriveCreatorUsingVirtualDriveGroup", "virtual-drive-carve", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'storageController'], [u'storageVirtualDriveWithDriveGroupSpace'], ["Get", "Set"]),
"StorageVirtualDriveWithDriveGroupSpace": MoMeta("StorageVirtualDriveWithDriveGroupSpace", "storageVirtualDriveWithDriveGroupSpace", "vd-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageVirtualDriveCreatorUsingVirtualDriveGroup'], [], ["Get"]),
"SuggestedStorageControllerSecurityKey": MoMeta("SuggestedStorageControllerSecurityKey", "suggestedStorageControllerSecurityKey", "suggested-sec-key", VersionMeta.Version303a, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [], ["Get"]),
"SysdebugMEpLog": MoMeta("SysdebugMEpLog", "sysdebugMEpLog", "log-[type]-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'mgmtController'], [u'faultInst'], ["Get", "Set"]),
"SysdebugTechSupportExport": MoMeta("SysdebugTechSupportExport", "sysdebugTechSupportExport", "tech-support", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis', u'topSystem'], [], [None]),
"TopRoot": MoMeta("TopRoot", "topRoot", "", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [], [u'topSystem'], ["Get"]),
"TopSystem": MoMeta("TopSystem", "topSystem", "sys", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topRoot'], [u'aaaLdap', u'aaaUserEp', u'certificateManagement', u'commSvcEp', u'equipmentChassis', u'sysdebugTechSupportExport'], ["Get", "Set"]),
"UploadBiosProfile": MoMeta("UploadBiosProfile", "uploadBiosProfile", "upload-bios-profile", VersionMeta.Version301c, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'biosProfileManagement'], [], [None]),
"UploadCertificate": MoMeta("UploadCertificate", "uploadCertificate", "upload-cert", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'certificateManagement'], [], [None]),
"UploadPIDCatalog": MoMeta("UploadPIDCatalog", "uploadPIDCatalog", "upload-catalog", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'chassisPIDCatalog'], [], ["Get"]),
"VicBackupAll": MoMeta("VicBackupAll", "vicBackupAll", "vic-all-exportconfig", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], ["Get", "Set"]),
"VicImporterAll": MoMeta("VicImporterAll", "vicImporterAll", "vic-all-importconfig", VersionMeta.Version303a, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], ["Get", "Set"]),
"AutoPowerProfile": MoMeta("AutoPowerProfile", "autoPowerProfile", "auto-prof", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'chassisPowerBudget'], [], ["Get", "Set"]),
"BmcResetReason": MoMeta("BmcResetReason", "bmcResetReason", "bmc-reset-reason", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get"]),
"ChassisPIDCatalog": MoMeta("ChassisPIDCatalog", "chassisPIDCatalog", "pid", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'uploadPIDCatalog'], ["Get"]),
"ChassisPowerBudget": MoMeta("ChassisPowerBudget", "chassisPowerBudget", "budget", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'autoPowerProfile'], ["Get"]),
"ChassisPowerMonitor": MoMeta("ChassisPowerMonitor", "chassisPowerMonitor", "pwrmonitor", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], ["Get"]),
"ChassisPowerUtilization": MoMeta("ChassisPowerUtilization", "chassisPowerUtilization", "utilization", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], ["Get"]),
"CommEpIpmiLan": MoMeta("CommEpIpmiLan", "commEpIpmiLan", "cmc-ipmi-lan", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentSharedIOModule', u'equipmentSystemIOController'], [], ["Get", "Set"]),
"CommSvcRack": MoMeta("CommSvcRack", "commSvcRack", "svc-ext", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'commIpmiLan', u'commKvm', u'commVMedia'], ["Get"]),
"ComputeServerNode": MoMeta("ComputeServerNode", "computeServerNode", "server-[server_id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'adaptorUnit', u'biosUnit', u'bmcResetReason', u'commSvcRack', u'computeBoard', u'equipmentFanModule', u'equipmentIndicatorLed', u'equipmentLocatorLed', u'equipmentPsu', u'faultInst', u'huuController', u'ioExpander', u'iodController', u'kmipManagement', u'lsbootDef', u'lsbootDevPrecision', u'mgmtBackupServer', u'mgmtController', u'mgmtImporterServer', u'networkAdapterUnit', u'oneTimeBootDevice', u'oneTimePrecisionBootDevice', u'osiController', u'pciEquipSlot', u'powerBudget', u'powerMonitor', u'serverUtilization', u'solIf'], ["Get", "Set"]),
"ComputeServerNodeMbTempStats": MoMeta("ComputeServerNodeMbTempStats", "computeServerNodeMbTempStats", "temp-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeBoard'], [], ["Get"]),
"ComputeServerRef": MoMeta("ComputeServerRef", "computeServerRef", "server-ref-[ownership]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageEnclosureDiskSlotEp'], [], ["Get"]),
"ComputeSharedIOMbPowerStats": MoMeta("ComputeSharedIOMbPowerStats", "computeSharedIOMbPowerStats", "power-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentSharedIOModule'], [], ["Get"]),
"ComputeSharedIOMbTempStats": MoMeta("ComputeSharedIOMbTempStats", "computeSharedIOMbTempStats", "temp-stats", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentSharedIOModule'], [], ["Get"]),
"CustomPowerProfile": MoMeta("CustomPowerProfile", "customPowerProfile", "cust-prof", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'powerBudget'], [], ["Get", "Set"]),
"EquipmentChassis": MoMeta("EquipmentChassis", "equipmentChassis", "chassis-1", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'topSystem'], [u'chassisPIDCatalog', u'chassisPowerBudget', u'chassisPowerMonitor', u'chassisPowerUtilization', u'computeServerNode', u'equipmentChassisLocatorLed', u'equipmentFanModule', u'equipmentIndicatorLed', u'equipmentPsu', u'equipmentSystemIOController', u'eventManagement', u'faultInst', u'mgmtBackup', u'mgmtIf', u'mgmtImporter', u'mgmtInventory', u'storageEnclosure', u'storageSasExpander', u'sysdebugTechSupportExport', u'vicBackupAll', u'vicImporterAll'], ["Get"]),
"EquipmentChassisLocatorLed": MoMeta("EquipmentChassisLocatorLed", "equipmentChassisLocatorLed", "chassis-locator-led", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [], ["Get", "Set"]),
"EquipmentSharedIOModule": MoMeta("EquipmentSharedIOModule", "equipmentSharedIOModule", "shared-io-module-[slot_id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentSystemIOController'], [u'commEpIpmiLan', u'computeSharedIOMbPowerStats', u'computeSharedIOMbTempStats', u'mgmtController'], ["Get"]),
"EquipmentSystemIOController": MoMeta("EquipmentSystemIOController", "equipmentSystemIOController", "slot-[id]", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'commEpIpmiLan', u'equipmentSharedIOModule', u'mgmtController', u'siocResetReason'], ["Get", "Set"]),
"IoExpander": MoMeta("IoExpander", "ioExpander", "io-expander", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get"]),
"MgmtBackupServer": MoMeta("MgmtBackupServer", "mgmtBackupServer", "exporter-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"MgmtImporterServer": MoMeta("MgmtImporterServer", "mgmtImporterServer", "importer-config", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"]),
"SiocResetReason": MoMeta("SiocResetReason", "siocResetReason", "sioc-reset-reason", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'equipmentSystemIOController'], [], ["Get"]),
"StorageEnclosure": MoMeta("StorageEnclosure", "storageEnclosure", "enc-1", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'equipmentChassis'], [u'storageEnclosureDisk', u'storageEnclosureDiskFwHelper', u'storageEnclosureDiskSlotEp', u'storageEnclosureDiskSlotZoneHelper'], ["Get", "Set"]),
"StorageEnclosureDisk": MoMeta("StorageEnclosureDisk", "storageEnclosureDisk", "disk-[slot]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageEnclosure'], [], ["Get"]),
"StorageEnclosureDiskFwHelper": MoMeta("StorageEnclosureDiskFwHelper", "storageEnclosureDiskFwHelper", "drive-fw-update", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'storageEnclosure'], [], ["Get", "Set"]),
"StorageEnclosureDiskSlotEp": MoMeta("StorageEnclosureDiskSlotEp", "storageEnclosureDiskSlotEp", "disk-slot-[slot]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageEnclosure'], [u'computeServerRef', u'faultInst'], ["Get"]),
"StorageEnclosureDiskSlotZoneHelper": MoMeta("StorageEnclosureDiskSlotZoneHelper", "storageEnclosureDiskSlotZoneHelper", "zone-drive", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin"], [u'storageEnclosure'], [], ["Get", "Set"]),
"StorageLocalDiskEp": MoMeta("StorageLocalDiskEp", "storageLocalDiskEp", "diskRef-[id]", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["admin", "read-only", "user"], [u'storageController'], [u'faultInst'], ["Get"]),
"StorageSasUplink": MoMeta("StorageSasUplink", "storageSasUplink", "uplink", VersionMeta.Version2013e, "OutputOnly", 0x1, [], ["read-only"], [u'storageSasExpander'], [], ["Get"]),
"ThermalPowerProfile": MoMeta("ThermalPowerProfile", "thermalPowerProfile", "thermal-prof", VersionMeta.Version2013e, "InputOutput", 0x1, [], ["admin", "read-only", "user"], [u'powerBudget'], [], ["Get", "Set"]),
}
}
# Class IDs that name API method calls (AAA session handling, config
# resolution, event subscription) rather than managed-object classes.
# Immutable on purpose: membership tests only, never mutated.
METHOD_CLASS_ID = frozenset((
    "AaaGetComputeAuthTokens",
    "AaaKeepAlive",
    "AaaLogin",
    "AaaLogout",
    "AaaRefresh",
    "ConfigConfMo",
    "ConfigConfMos",
    "ConfigResolveChildren",
    "ConfigResolveClass",
    "ConfigResolveDn",
    "ConfigResolveParent",
    "EventSubscribe",
    "EventUnsubscribe",
))
# Maps class names that are not managed objects to their schema type string.
# NOTE(review): the "imcbasetype" vs "imcfiltertype" split is reproduced
# verbatim from the generated table — presumably base API types vs query
# filter types; confirm against the IMC XML schema. Pair order is kept
# identical to the original literal so dict iteration order is unchanged.
OTHER_TYPE_CLASS_ID = dict([
    ("Method", "imcbasetype"),
    ("AllbitsFilter", "imcfiltertype"),
    ("AndFilter", "imcfiltertype"),
    ("AnybitFilter", "imcfiltertype"),
    ("BwFilter", "imcfiltertype"),
    ("ConfigConfig", "imcbasetype"),
    ("ConfigMap", "imcbasetype"),
    ("ConfigSet", "imcbasetype"),
    ("EqFilter", "imcfiltertype"),
    ("FailedMo", "imcbasetype"),
    ("FailedMos", "imcbasetype"),
    ("FilterFilter", "imcbasetype"),
    ("GeFilter", "imcfiltertype"),
    ("GtFilter", "imcfiltertype"),
    ("LeFilter", "imcfiltertype"),
    ("LtFilter", "imcfiltertype"),
    ("NeFilter", "imcfiltertype"),
    ("NotFilter", "imcfiltertype"),
    ("OrFilter", "imcfiltertype"),
    ("Pair", "imcbasetype"),
    ("WcardFilter", "imcfiltertype"),
])
| 157.929577
| 2,698
| 0.688778
| 13,274
| 168,195
| 8.725102
| 0.067651
| 0.042135
| 0.076448
| 0.063825
| 0.864631
| 0.857076
| 0.855557
| 0.843348
| 0.832339
| 0.808249
| 0
| 0.024175
| 0.100621
| 168,195
| 1,064
| 2,699
| 158.078008
| 0.741451
| 0.004138
| 0
| 0.046243
| 0
| 0
| 0.567924
| 0.179585
| 0
| 0
| 0.011214
| 0
| 0.007707
| 1
| 0
| false
| 0.014451
| 0.017341
| 0
| 0.034682
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5041b3f65061973932c8af1093559c7c817fd4bc
| 150
|
py
|
Python
|
mantisshrimp/datasets/pets/__init__.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | null | null | null |
mantisshrimp/datasets/pets/__init__.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | 8
|
2020-06-16T18:06:42.000Z
|
2020-09-15T22:35:56.000Z
|
mantisshrimp/datasets/pets/__init__.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | null | null | null |
from mantisshrimp.datasets.pets.data import *
from mantisshrimp.datasets.pets.parsers import *
from mantisshrimp.datasets.pets.visualization import *
| 37.5
| 54
| 0.84
| 18
| 150
| 7
| 0.444444
| 0.380952
| 0.571429
| 0.666667
| 0.539683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 3
| 55
| 50
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ac966c017cb7bd82a1ca26d53acb914f3e574b47
| 4,281
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowPlatform/cli/equal/golden_output3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowPlatform/cli/equal/golden_output3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowPlatform/cli/equal/golden_output3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Golden test fixture: the expected parsed structure for a platform/slot
# listing. Shape: "slot" -> card category ("rp" / "lc") -> slot id ->
# attribute dict; modular line cards additionally carry a nested "subslot"
# mapping of module index -> attribute dict.
# NOTE(review): values look like IOS XR "show platform" output on an ASR9k
# (A9K-* PIDs, "IOS XR RUN" states) — confirm against the matching test case.
expected_output = {
    "slot": {
        "rp": {  # route processor slot(s); only the RP carries "redundancy_state" != "None" here
            "0/RSP0": {
                "name": "A9K-RSP440-SE",
                "full_slot": "0/RSP0/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "redundancy_state": "Active"
            }
        },
        "lc": {  # line cards, keyed by rack/slot
            "0/0": {
                "name": "A9K-MOD80-SE",
                "full_slot": "0/0/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "subslot": {
                    "0": {
                        "name": "A9K-MPA-20X1GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    },
                    "1": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    }
                }
            },
            # Fixed-port card: no "subslot" key, unlike the MOD80 entries.
            "0/1": {
                "name": "A9K-24x10GE-TR",
                "full_slot": "0/1/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON"
            },
            "0/2": {
                "name": "A9K-MOD80-SE",
                "full_slot": "0/2/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "subslot": {
                    "0": {
                        "name": "A9K-MPA-20X1GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    },
                    "1": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    }
                }
            },
            "0/3": {
                "name": "A9K-MOD80-TR",
                "full_slot": "0/3/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "subslot": {
                    "0": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    },
                    "1": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    }
                }
            },
            "0/4": {
                "name": "A9K-MOD80-TR",
                "full_slot": "0/4/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "subslot": {
                    "0": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    },
                    "1": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    }
                }
            },
            "0/5": {
                "name": "A9K-MOD80-TR",
                "full_slot": "0/5/CPU0",
                "state": "IOS XR RUN",
                "config_state": "PWR,NSHUT,MON",
                "subslot": {
                    "0": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    },
                    "1": {
                        "name": "A9K-MPA-4X10GE",
                        "state": "OK",
                        "config_state": "PWR,NSHUT,MON",
                        "redundancy_state": "None"
                    }
                }
            }
        }
    }
}
| 35.090164
| 56
| 0.286849
| 303
| 4,281
| 3.933993
| 0.122112
| 0.099832
| 0.199664
| 0.270973
| 0.916107
| 0.916107
| 0.916107
| 0.807047
| 0.807047
| 0.807047
| 0
| 0.058665
| 0.565989
| 4,281
| 121
| 57
| 35.380165
| 0.582885
| 0
| 0
| 0.603306
| 0
| 0
| 0.307405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
acd92a0b625f087d2c37dbcc025de91cb62e6b25
| 66,132
|
py
|
Python
|
delvewheel/dll_list.py
|
AntoineD/delvewheel
|
8f5e837d6e09e9c19adfa70616d0e2cc9167811e
|
[
"MIT"
] | 47
|
2020-12-29T02:03:38.000Z
|
2022-03-29T20:36:40.000Z
|
delvewheel/dll_list.py
|
AntoineD/delvewheel
|
8f5e837d6e09e9c19adfa70616d0e2cc9167811e
|
[
"MIT"
] | 27
|
2021-01-04T21:46:24.000Z
|
2022-03-26T02:12:11.000Z
|
delvewheel/dll_list.py
|
AntoineD/delvewheel
|
8f5e837d6e09e9c19adfa70616d0e2cc9167811e
|
[
"MIT"
] | 8
|
2020-12-24T18:01:13.000Z
|
2022-02-02T10:47:37.000Z
|
"""Lists of DLLs to handle specially."""
import re
# ignore_names_32 is a set containing the lowercase names of all DLLs that can
# be assumed to be present on 32-bit Windows 7 or later. These are all the files
# with extension .dll or .drv found in C:\Windows\SysWOW64 on a vanilla Windows
# 7 Ultimate SP1 64-bit installation.
ignore_names_32 = {
'aaclient.dll',
'accessibilitycpl.dll',
'acctres.dll',
'acledit.dll',
'aclui.dll',
'acppage.dll',
'actioncenter.dll',
'actioncentercpl.dll',
'activeds.dll',
'actxprxy.dll',
'admtmpl.dll',
'adprovider.dll',
'adsldp.dll',
'adsldpc.dll',
'adsmsext.dll',
'adsnt.dll',
'adtschema.dll',
'advapi32.dll',
'advpack.dll',
'aecache.dll',
'aeevts.dll',
'alttab.dll',
'amstream.dll',
'amxread.dll',
'apds.dll',
'api-ms-win-core-file-l1-2-0.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-localization-l1-2-0.dll',
'api-ms-win-core-processthreads-l1-1-1.dll',
'api-ms-win-core-synch-l1-2-0.dll',
'api-ms-win-core-timezone-l1-1-0.dll',
'api-ms-win-core-xstate-l2-1-0.dll',
'api-ms-win-crt-conio-l1-1-0.dll',
'api-ms-win-crt-convert-l1-1-0.dll',
'api-ms-win-crt-environment-l1-1-0.dll',
'api-ms-win-crt-filesystem-l1-1-0.dll',
'api-ms-win-crt-heap-l1-1-0.dll',
'api-ms-win-crt-locale-l1-1-0.dll',
'api-ms-win-crt-math-l1-1-0.dll',
'api-ms-win-crt-multibyte-l1-1-0.dll',
'api-ms-win-crt-private-l1-1-0.dll',
'api-ms-win-crt-process-l1-1-0.dll',
'api-ms-win-crt-runtime-l1-1-0.dll',
'api-ms-win-crt-stdio-l1-1-0.dll',
'api-ms-win-crt-string-l1-1-0.dll',
'api-ms-win-crt-time-l1-1-0.dll',
'api-ms-win-crt-utility-l1-1-0.dll',
'apilogen.dll',
'apircl.dll',
'apisetschema.dll',
'apphelp.dll',
'apphlpdm.dll',
'appidapi.dll',
'appidpolicyengineapi.dll',
'appmgmts.dll',
'appmgr.dll',
'apss.dll',
'asferror.dll',
'aspnet_counters.dll',
'asycfilt.dll',
'atl.dll',
'atmfd.dll',
'atmlib.dll',
'audiodev.dll',
'audioeng.dll',
'audiokse.dll',
'audioses.dll',
'auditnativesnapin.dll',
'auditpolicygpinterop.dll',
'auditpolmsg.dll',
'authfwcfg.dll',
'authfwgp.dll',
'authfwsnapin.dll',
'authfwwizfwk.dll',
'authui.dll',
'authz.dll',
'autoplay.dll',
'auxiliarydisplayapi.dll',
'auxiliarydisplaycpl.dll',
'avicap32.dll',
'avifil32.dll',
'avrt.dll',
'azroles.dll',
'azroleui.dll',
'azsqlext.dll',
'basecsp.dll',
'batmeter.dll',
'bcrypt.dll',
'bcryptprimitives.dll',
'bidispl.dll',
'biocredprov.dll',
'bitsperf.dll',
'bitsprx2.dll',
'bitsprx3.dll',
'bitsprx4.dll',
'bitsprx5.dll',
'bitsprx6.dll',
'blackbox.dll',
'bootvid.dll',
'browcli.dll',
'browseui.dll',
'btpanui.dll',
'bwcontexthandler.dll',
'bwunpairelevated.dll',
'cabinet.dll',
'cabview.dll',
'capiprovider.dll',
'capisp.dll',
'catsrv.dll',
'catsrvps.dll',
'catsrvut.dll',
'cca.dll',
'cdosys.dll',
'certcli.dll',
'certcredprovider.dll',
'certenc.dll',
'certenroll.dll',
'certenrollui.dll',
'certmgr.dll',
'certpoleng.dll',
'cewmdm.dll',
'cfgbkend.dll',
'cfgmgr32.dll',
'chsbrkr.dll',
'chtbrkr.dll',
'chxreadingstringime.dll',
'cic.dll',
'clb.dll',
'clbcatq.dll',
'clfsw32.dll',
'cliconfg.dll',
'clusapi.dll',
'cmcfg32.dll',
'cmdial32.dll',
'cmicryptinstall.dll',
'cmifw.dll',
'cmipnpinstall.dll',
'cmlua.dll',
'cmpbk32.dll',
'cmstplua.dll',
'cmutil.dll',
'cngaudit.dll',
'cngprovider.dll',
'cnvfat.dll',
'colbact.dll',
'colorcnv.dll',
'colorui.dll',
'comcat.dll',
'comctl32.dll',
'comdlg32.dll',
'compobj.dll',
'compstui.dll',
'comrepl.dll',
'comres.dll',
'comsnap.dll',
'comsvcs.dll',
'comuid.dll',
'connect.dll',
'console.dll',
'cpfilters.dll',
'credssp.dll',
'credui.dll',
'crtdll.dll',
'crypt32.dll',
'cryptbase.dll',
'cryptdlg.dll',
'cryptdll.dll',
'cryptext.dll',
'cryptnet.dll',
'cryptsp.dll',
'cryptsvc.dll',
'cryptui.dll',
'cryptxml.dll',
'cscapi.dll',
'cscdll.dll',
'cscobj.dll',
'ctl3d32.dll',
'c_g18030.dll',
'c_is2022.dll',
'c_iscii.dll',
'd2d1.dll',
'd3d10.dll',
'd3d10core.dll',
'd3d10level9.dll',
'd3d10warp.dll',
'd3d10_1.dll',
'd3d10_1core.dll',
'd3d11.dll',
'd3d8.dll',
'd3d8thk.dll',
'd3d9.dll',
'd3dcompiler_47.dll',
'd3dim.dll',
'd3dim700.dll',
'd3dramp.dll',
'd3dxof.dll',
'dataclen.dll',
'davclnt.dll',
'davhlpr.dll',
'dbgeng.dll',
'dbghelp.dll',
'dbnetlib.dll',
'dbnmpntw.dll',
'dciman32.dll',
'ddaclsys.dll',
'ddoiproxy.dll',
'ddores.dll',
'ddraw.dll',
'ddrawex.dll',
'defaultlocationcpl.dll',
'deskadp.dll',
'deskmon.dll',
'deskperf.dll',
'devenum.dll',
'devicecenter.dll',
'devicedisplaystatusmanager.dll',
'devicemetadataparsers.dll',
'devicepairing.dll',
'devicepairingfolder.dll',
'devicepairinghandler.dll',
'devicepairingproxy.dll',
'deviceuxres.dll',
'devmgr.dll',
'devobj.dll',
'devrtl.dll',
'dfscli.dll',
'dfshim.dll',
'dfsshlex.dll',
'dhcpcmonitor.dll',
'dhcpcore.dll',
'dhcpcore6.dll',
'dhcpcsvc.dll',
'dhcpcsvc6.dll',
'dhcpqec.dll',
'dhcpsapi.dll',
'difxapi.dll',
'dimsjob.dll',
'dimsroam.dll',
'dinput.dll',
'dinput8.dll',
'diskcopy.dll',
'dispex.dll',
'display.dll',
'dmband.dll',
'dmcompos.dll',
'dmdlgs.dll',
'dmdskmgr.dll',
'dmdskres.dll',
'dmdskres2.dll',
'dmime.dll',
'dmintf.dll',
'dmloader.dll',
'dmocx.dll',
'dmrc.dll',
'dmscript.dll',
'dmstyle.dll',
'dmsynth.dll',
'dmusic.dll',
'dmutil.dll',
'dmvdsitf.dll',
'dnsapi.dll',
'dnscmmc.dll',
'docprop.dll',
'dot3api.dll',
'dot3cfg.dll',
'dot3dlg.dll',
'dot3gpclnt.dll',
'dot3gpui.dll',
'dot3hc.dll',
'dot3msm.dll',
'dot3ui.dll',
'dpapiprovider.dll',
'dplayx.dll',
'dpmodemx.dll',
'dpnaddr.dll',
'dpnathlp.dll',
'dpnet.dll',
'dpnhpast.dll',
'dpnhupnp.dll',
'dpnlobby.dll',
'dpwsockx.dll',
'dpx.dll',
'drmmgrtn.dll',
'drmv2clt.dll',
'drprov.dll',
'drt.dll',
'drtprov.dll',
'drttransport.dll',
'drvstore.dll',
'ds32gt.dll',
'dsauth.dll',
'dsdmo.dll',
'dshowrdpfilter.dll',
'dskquota.dll',
'dskquoui.dll',
'dsound.dll',
'dsprop.dll',
'dsquery.dll',
'dsrole.dll',
'dssec.dll',
'dssenh.dll',
'dsuiext.dll',
'dswave.dll',
'dtsh.dll',
'dui70.dll',
'duser.dll',
'dwmapi.dll',
'dwmcore.dll',
'dwrite.dll',
'dxdiagn.dll',
'dxgi.dll',
'dxmasf.dll',
'dxptaskringtone.dll',
'dxptasksync.dll',
'dxtmsft.dll',
'dxtrans.dll',
'dxva2.dll',
'eapp3hst.dll',
'eappcfg.dll',
'eappgnui.dll',
'eapphost.dll',
'eappprxy.dll',
'eapqec.dll',
'efsadu.dll',
'efscore.dll',
'efsutil.dll',
'ehstorapi.dll',
'ehstorpwdmgr.dll',
'ehstorshell.dll',
'els.dll',
'elscore.dll',
'elshyph.dll',
'elslad.dll',
'elstrans.dll',
'encapi.dll',
'encdec.dll',
'eqossnap.dll',
'es.dll',
'esent.dll',
'esentprf.dll',
'eventcls.dll',
'evr.dll',
'explorerframe.dll',
'expsrv.dll',
'f3ahvoas.dll',
'faultrep.dll',
'fdbth.dll',
'fdbthproxy.dll',
'fde.dll',
'fdeploy.dll',
'fdpnp.dll',
'fdproxy.dll',
'fdssdp.dll',
'fdwcn.dll',
'fdwnet.dll',
'fdwsd.dll',
'feclient.dll',
'filemgmt.dll',
'findnetprinters.dll',
'firewallapi.dll',
'firewallcontrolpanel.dll',
'fltlib.dll',
'fmifs.dll',
'fms.dll',
'fontext.dll',
'fontsub.dll',
'fphc.dll',
'framedyn.dll',
'framedynos.dll',
'fthsvc.dll',
'fundisc.dll',
'fwcfg.dll',
'fwpuclnt.dll',
'fwremotesvr.dll',
'fxsapi.dll',
'fxscom.dll',
'fxscomex.dll',
'fxsext32.dll',
'fxsresm.dll',
'fxsxp32.dll',
'gameux.dll',
'gameuxlegacygdfs.dll',
'gcdef.dll',
'gdi32.dll',
'getuname.dll',
'glmf32.dll',
'glu32.dll',
'gpapi.dll',
'gpedit.dll',
'gpprefcl.dll',
'gpprnext.dll',
'gpscript.dll',
'gptext.dll',
'hbaapi.dll',
'hcproviders.dll',
'helppaneproxy.dll',
'hgcpl.dll',
'hhsetup.dll',
'hid.dll',
'hidserv.dll',
'hlink.dll',
'hnetcfg.dll',
'hnetmon.dll',
'httpapi.dll',
'htui.dll',
'ias.dll',
'iasacct.dll',
'iasads.dll',
'iasdatastore.dll',
'iashlpr.dll',
'iasmigplugin.dll',
'iasnap.dll',
'iaspolcy.dll',
'iasrad.dll',
'iasrecst.dll',
'iassam.dll',
'iassdo.dll',
'iassvcs.dll',
'icardie.dll',
'icardres.dll',
'iccvid.dll',
'icm32.dll',
'icmp.dll',
'icmui.dll',
'iconcodecservice.dll',
'icsigd.dll',
'idndl.dll',
'idstore.dll',
'ieadvpack.dll',
'ieapfltr.dll',
'iedkcs32.dll',
'ieetwproxystub.dll',
'ieframe.dll',
'iepeers.dll',
'iernonce.dll',
'iertutil.dll',
'iesetup.dll',
'iesysprep.dll',
'ieui.dll',
'ifmon.dll',
'ifsutil.dll',
'ifsutilx.dll',
'imagehlp.dll',
'imageres.dll',
'imagesp1.dll',
'imapi.dll',
'imapi2.dll',
'imapi2fs.dll',
'imgutil.dll',
'imjp10k.dll',
'imm32.dll',
'inetcomm.dll',
'inetmib1.dll',
'inetres.dll',
'infocardapi.dll',
'inked.dll',
'input.dll',
'inseng.dll',
'iologmsg.dll',
'ipbusenumproxy.dll',
'iphlpapi.dll',
'iprop.dll',
'iprtprio.dll',
'iprtrmgr.dll',
'ipsecsnp.dll',
'ipsmsnap.dll',
'ir32_32.dll',
'ir41_qc.dll',
'ir41_qcx.dll',
'ir50_32.dll',
'ir50_qc.dll',
'ir50_qcx.dll',
'irclass.dll',
'iscsicpl.dll',
'iscsidsc.dll',
'iscsied.dll',
'iscsium.dll',
'iscsiwmi.dll',
'itircl.dll',
'itss.dll',
'itvdata.dll',
'iyuv_32.dll',
'javascriptcollectionagent.dll',
'jscript.dll',
'jscript9.dll',
'jscript9diag.dll',
'jsintl.dll',
'jsproxy.dll',
'kbd101.dll',
'kbd101a.dll',
'kbd101b.dll',
'kbd101c.dll',
'kbd103.dll',
'kbd106.dll',
'kbd106n.dll',
'kbda1.dll',
'kbda2.dll',
'kbda3.dll',
'kbdal.dll',
'kbdarme.dll',
'kbdarmw.dll',
'kbdax2.dll',
'kbdaze.dll',
'kbdazel.dll',
'kbdbash.dll',
'kbdbe.dll',
'kbdbene.dll',
'kbdbgph.dll',
'kbdbgph1.dll',
'kbdbhc.dll',
'kbdblr.dll',
'kbdbr.dll',
'kbdbu.dll',
'kbdbulg.dll',
'kbdca.dll',
'kbdcan.dll',
'kbdcr.dll',
'kbdcz.dll',
'kbdcz1.dll',
'kbdcz2.dll',
'kbdda.dll',
'kbddiv1.dll',
'kbddiv2.dll',
'kbddv.dll',
'kbdes.dll',
'kbdest.dll',
'kbdfa.dll',
'kbdfc.dll',
'kbdfi.dll',
'kbdfi1.dll',
'kbdfo.dll',
'kbdfr.dll',
'kbdgae.dll',
'kbdgeo.dll',
'kbdgeoer.dll',
'kbdgeoqw.dll',
'kbdgkl.dll',
'kbdgr.dll',
'kbdgr1.dll',
'kbdgrlnd.dll',
'kbdhau.dll',
'kbdhe.dll',
'kbdhe220.dll',
'kbdhe319.dll',
'kbdheb.dll',
'kbdhela2.dll',
'kbdhela3.dll',
'kbdhept.dll',
'kbdhu.dll',
'kbdhu1.dll',
'kbdibm02.dll',
'kbdibo.dll',
'kbdic.dll',
'kbdinasa.dll',
'kbdinbe1.dll',
'kbdinbe2.dll',
'kbdinben.dll',
'kbdindev.dll',
'kbdinguj.dll',
'kbdinhin.dll',
'kbdinkan.dll',
'kbdinmal.dll',
'kbdinmar.dll',
'kbdinori.dll',
'kbdinpun.dll',
'kbdintam.dll',
'kbdintel.dll',
'kbdinuk2.dll',
'kbdir.dll',
'kbdit.dll',
'kbdit142.dll',
'kbdiulat.dll',
'kbdjpn.dll',
'kbdkaz.dll',
'kbdkhmr.dll',
'kbdkor.dll',
'kbdkyr.dll',
'kbdla.dll',
'kbdlao.dll',
'kbdlk41a.dll',
'kbdlt.dll',
'kbdlt1.dll',
'kbdlt2.dll',
'kbdlv.dll',
'kbdlv1.dll',
'kbdmac.dll',
'kbdmacst.dll',
'kbdmaori.dll',
'kbdmlt47.dll',
'kbdmlt48.dll',
'kbdmon.dll',
'kbdmonmo.dll',
'kbdne.dll',
'kbdnec.dll',
'kbdnec95.dll',
'kbdnecat.dll',
'kbdnecnt.dll',
'kbdnepr.dll',
'kbdno.dll',
'kbdno1.dll',
'kbdnso.dll',
'kbdpash.dll',
'kbdpl.dll',
'kbdpl1.dll',
'kbdpo.dll',
'kbdro.dll',
'kbdropr.dll',
'kbdrost.dll',
'kbdru.dll',
'kbdru1.dll',
'kbdsf.dll',
'kbdsg.dll',
'kbdsl.dll',
'kbdsl1.dll',
'kbdsmsfi.dll',
'kbdsmsno.dll',
'kbdsn1.dll',
'kbdsorex.dll',
'kbdsors1.dll',
'kbdsorst.dll',
'kbdsp.dll',
'kbdsw.dll',
'kbdsw09.dll',
'kbdsyr1.dll',
'kbdsyr2.dll',
'kbdtajik.dll',
'kbdtat.dll',
'kbdth0.dll',
'kbdth1.dll',
'kbdth2.dll',
'kbdth3.dll',
'kbdtiprc.dll',
'kbdtuf.dll',
'kbdtuq.dll',
'kbdturme.dll',
'kbdughr.dll',
'kbdughr1.dll',
'kbduk.dll',
'kbdukx.dll',
'kbdur.dll',
'kbdur1.dll',
'kbdurdu.dll',
'kbdus.dll',
'kbdusa.dll',
'kbdusl.dll',
'kbdusr.dll',
'kbdusx.dll',
'kbduzb.dll',
'kbdvntc.dll',
'kbdwol.dll',
'kbdyak.dll',
'kbdyba.dll',
'kbdycc.dll',
'kbdycl.dll',
'kerberos.dll',
'kernel32.dll',
'kernelbase.dll',
'keyiso.dll',
'keymgr.dll',
'korwbrkr.dll',
'ksuser.dll',
'ktmw32.dll',
'l2gpstore.dll',
'l2nacp.dll',
'l2sechc.dll',
'laprxy.dll',
'licmgr10.dll',
'linkinfo.dll',
'loadperf.dll',
'localsec.dll',
'locationapi.dll',
'loghours.dll',
'logoncli.dll',
'lpk.dll',
'lsmproxy.dll',
'luainstall.dll',
'lz32.dll',
'magnification.dll',
'mapi32.dll',
'mapistub.dll',
'mcewmdrmndbootstrap.dll',
'mciavi32.dll',
'mcicda.dll',
'mciqtz32.dll',
'mciseq.dll',
'mciwave.dll',
'mctres.dll',
'mdminst.dll',
'mediametadatahandler.dll',
'mf.dll',
'mf3216.dll',
'mfaacenc.dll',
'mfc40.dll',
'mfc40u.dll',
'mfc42.dll',
'mfc42u.dll',
'mfcsubs.dll',
'mfds.dll',
'mfdvdec.dll',
'mferror.dll',
'mfh264enc.dll',
'mfmjpegdec.dll',
'mfplat.dll',
'mfplay.dll',
'mfps.dll',
'mfreadwrite.dll',
'mfvdsp.dll',
'mfwmaaec.dll',
'mgmtapi.dll',
'midimap.dll',
'migisol.dll',
'miguiresource.dll',
'mimefilt.dll',
'mlang.dll',
'mmcbase.dll',
'mmci.dll',
'mmcico.dll',
'mmcndmgr.dll',
'mmcshext.dll',
'mmdevapi.dll',
'mmres.dll',
'modemui.dll',
'moricons.dll',
'mp3dmod.dll',
'mp43decd.dll',
'mp4sdecd.dll',
'mpg4decd.dll',
'mpr.dll',
'mprapi.dll',
'mprddm.dll',
'mprdim.dll',
'mprmsg.dll',
'msaatext.dll',
'msac3enc.dll',
'msacm32.dll',
'msafd.dll',
'msasn1.dll',
'msaudite.dll',
'mscandui.dll',
'mscat32.dll',
'msclmd.dll',
'mscms.dll',
'mscoree.dll',
'mscorier.dll',
'mscories.dll',
'mscpx32r.dll',
'mscpxl32.dll',
'msctf.dll',
'msctfmonitor.dll',
'msctfp.dll',
'msctfui.dll',
'msdadiag.dll',
'msdart.dll',
'msdelta.dll',
'msdmo.dll',
'msdrm.dll',
'msdtcprx.dll',
'msdtcuiu.dll',
'msdtcvsp1res.dll',
'msexch40.dll',
'msexcl40.dll',
'msfeeds.dll',
'msfeedsbs.dll',
'msftedit.dll',
'mshtml.dll',
'mshtmldac.dll',
'mshtmled.dll',
'mshtmler.dll',
'mshtmlmedia.dll',
'msi.dll',
'msidcrl30.dll',
'msident.dll',
'msidle.dll',
'msidntld.dll',
'msieftp.dll',
'msihnd.dll',
'msiltcfg.dll',
'msimg32.dll',
'msimsg.dll',
'msimtf.dll',
'msisip.dll',
'msjet40.dll',
'msjetoledb40.dll',
'msjint40.dll',
'msjter40.dll',
'msjtes40.dll',
'msls31.dll',
'msltus40.dll',
'msmpeg2adec.dll',
'msmpeg2enc.dll',
'msmpeg2vdec.dll',
'msnetobj.dll',
'msobjs.dll',
'msoeacct.dll',
'msoert2.dll',
'msorc32r.dll',
'msorcl32.dll',
'mspatcha.dll',
'mspbde40.dll',
'msports.dll',
'msrating.dll',
'msrd2x40.dll',
'msrd3x40.dll',
'msrdc.dll',
'msrdpwebaccess.dll',
'msrepl40.dll',
'msrle32.dll',
'msscntrs.dll',
'msscp.dll',
'mssha.dll',
'msshavmsg.dll',
'msshooks.dll',
'mssign32.dll',
'mssip32.dll',
'mssitlb.dll',
'mssph.dll',
'mssphtb.dll',
'mssprxy.dll',
'mssrch.dll',
'mssvp.dll',
'msswch.dll',
'mstask.dll',
'mstext40.dll',
'mstscax.dll',
'msutb.dll',
'msv1_0.dll',
'msvbvm60.dll',
'msvcirt.dll',
'msvcp110_clr0400.dll',
'msvcp120_clr0400.dll',
'msvcp60.dll',
'msvcr100_clr0400.dll',
'msvcr110_clr0400.dll',
'msvcr120_clr0400.dll',
'msvcrt.dll',
'msvcrt20.dll',
'msvcrt40.dll',
'msvfw32.dll',
'msvidc32.dll',
'msvidctl.dll',
'mswdat10.dll',
'mswmdm.dll',
'mswsock.dll',
'mswstr10.dll',
'msxbde40.dll',
'msxml3.dll',
'msxml3r.dll',
'msxml6.dll',
'msxml6r.dll',
'msyuv.dll',
'mtxclu.dll',
'mtxdm.dll',
'mtxex.dll',
'mtxlegih.dll',
'mtxoci.dll',
'muifontsetup.dll',
'mycomput.dll',
'mydocs.dll',
'napcrypt.dll',
'napdsnap.dll',
'naphlpr.dll',
'napinsp.dll',
'napipsec.dll',
'napmontr.dll',
'nativehooks.dll',
'naturallanguage6.dll',
'ncdprop.dll',
'nci.dll',
'ncobjapi.dll',
'ncrypt.dll',
'ncryptui.dll',
'ncsi.dll',
'nddeapi.dll',
'ndfapi.dll',
'ndfetw.dll',
'ndfhcdiscovery.dll',
'ndiscapcfg.dll',
'ndishc.dll',
'ndproxystub.dll',
'negoexts.dll',
'netapi32.dll',
'netbios.dll',
'netcenter.dll',
'netcfgx.dll',
'netcorehc.dll',
'netdiagfx.dll',
'netevent.dll',
'netfxperf.dll',
'neth.dll',
'netid.dll',
'netiohlp.dll',
'netjoin.dll',
'netlogon.dll',
'netmsg.dll',
'netplwiz.dll',
'netprof.dll',
'netprofm.dll',
'netshell.dll',
'netutils.dll',
'networkexplorer.dll',
'networkitemfactory.dll',
'networkmap.dll',
'newdev.dll',
'nlaapi.dll',
'nlhtml.dll',
'nlmgp.dll',
'nlmsprep.dll',
'nlsbres.dll',
'nlsdata0000.dll',
'nlsdata0001.dll',
'nlsdata0002.dll',
'nlsdata0003.dll',
'nlsdata0007.dll',
'nlsdata0009.dll',
'nlsdata000a.dll',
'nlsdata000c.dll',
'nlsdata000d.dll',
'nlsdata000f.dll',
'nlsdata0010.dll',
'nlsdata0011.dll',
'nlsdata0013.dll',
'nlsdata0018.dll',
'nlsdata0019.dll',
'nlsdata001a.dll',
'nlsdata001b.dll',
'nlsdata001d.dll',
'nlsdata0020.dll',
'nlsdata0021.dll',
'nlsdata0022.dll',
'nlsdata0024.dll',
'nlsdata0026.dll',
'nlsdata0027.dll',
'nlsdata002a.dll',
'nlsdata0039.dll',
'nlsdata003e.dll',
'nlsdata0045.dll',
'nlsdata0046.dll',
'nlsdata0047.dll',
'nlsdata0049.dll',
'nlsdata004a.dll',
'nlsdata004b.dll',
'nlsdata004c.dll',
'nlsdata004e.dll',
'nlsdata0414.dll',
'nlsdata0416.dll',
'nlsdata0816.dll',
'nlsdata081a.dll',
'nlsdata0c1a.dll',
'nlsdl.dll',
'nlslexicons0001.dll',
'nlslexicons0002.dll',
'nlslexicons0003.dll',
'nlslexicons0007.dll',
'nlslexicons0009.dll',
'nlslexicons000a.dll',
'nlslexicons000c.dll',
'nlslexicons000d.dll',
'nlslexicons000f.dll',
'nlslexicons0010.dll',
'nlslexicons0011.dll',
'nlslexicons0013.dll',
'nlslexicons0018.dll',
'nlslexicons0019.dll',
'nlslexicons001a.dll',
'nlslexicons001b.dll',
'nlslexicons001d.dll',
'nlslexicons0020.dll',
'nlslexicons0021.dll',
'nlslexicons0022.dll',
'nlslexicons0024.dll',
'nlslexicons0026.dll',
'nlslexicons0027.dll',
'nlslexicons002a.dll',
'nlslexicons0039.dll',
'nlslexicons003e.dll',
'nlslexicons0045.dll',
'nlslexicons0046.dll',
'nlslexicons0047.dll',
'nlslexicons0049.dll',
'nlslexicons004a.dll',
'nlslexicons004b.dll',
'nlslexicons004c.dll',
'nlslexicons004e.dll',
'nlslexicons0414.dll',
'nlslexicons0416.dll',
'nlslexicons0816.dll',
'nlslexicons081a.dll',
'nlslexicons0c1a.dll',
'nlsmodels0011.dll',
'normaliz.dll',
'npmproxy.dll',
'nshhttp.dll',
'nshipsec.dll',
'nshwfp.dll',
'nsi.dll',
'ntdll.dll',
'ntdsapi.dll',
'ntlanman.dll',
'ntlanui2.dll',
'ntmarta.dll',
'ntprint.dll',
'ntshrui.dll',
'ntvdm64.dll',
'objsel.dll',
'occache.dll',
'ocsetapi.dll',
'odbc32.dll',
'odbc32gt.dll',
'odbcbcp.dll',
'odbcconf.dll',
'odbccp32.dll',
'odbccr32.dll',
'odbccu32.dll',
'odbcint.dll',
'odbcji32.dll',
'odbcjt32.dll',
'odbctrac.dll',
'oddbse32.dll',
'odexl32.dll',
'odfox32.dll',
'odpdx32.dll',
'odtext32.dll',
'offfilt.dll',
'ogldrv.dll',
'ole2.dll',
'ole2disp.dll',
'ole2nls.dll',
'ole32.dll',
'oleacc.dll',
'oleacchooks.dll',
'oleaccrc.dll',
'oleaut32.dll',
'olecli32.dll',
'oledlg.dll',
'oleprn.dll',
'olepro32.dll',
'oleres.dll',
'olesvr32.dll',
'olethk32.dll',
'onex.dll',
'onexui.dll',
'onlineidcpl.dll',
'oobefldr.dll',
'opcservices.dll',
'opengl32.dll',
'osbaseln.dll',
'osuninst.dll',
'p2p.dll',
'p2pcollab.dll',
'p2pgraph.dll',
'p2pnetsh.dll',
'packager.dll',
'panmap.dll',
'pautoenr.dll',
'pcaui.dll',
'pcwum.dll',
'pdh.dll',
'pdhui.dll',
'peerdist.dll',
'peerdistsh.dll',
'perfcentercpl.dll',
'perfctrs.dll',
'perfdisk.dll',
'perfnet.dll',
'perfos.dll',
'perfproc.dll',
'perfts.dll',
'photometadatahandler.dll',
'photowiz.dll',
'pid.dll',
'pidgenx.dll',
'pifmgr.dll',
'pku2u.dll',
'pla.dll',
'playsndsrv.dll',
'pmcsnap.dll',
'pngfilt.dll',
'pnidui.dll',
'pnpsetup.dll',
'pnrpnsp.dll',
'polstore.dll',
'portabledeviceapi.dll',
'portabledeviceclassextension.dll',
'portabledeviceconnectapi.dll',
'portabledevicestatus.dll',
'portabledevicesyncprovider.dll',
'portabledevicetypes.dll',
'portabledevicewiacompat.dll',
'portabledevicewmdrm.dll',
'pots.dll',
'powercpl.dll',
'powrprof.dll',
'ppcsnap.dll',
'presentationcffrasterizernative_v0300.dll',
'presentationhostproxy.dll',
'presentationnative_v0300.dll',
'prflbmsg.dll',
'printui.dll',
'prncache.dll',
'prnfldr.dll',
'prnntfy.dll',
'prntvpt.dll',
'profapi.dll',
'propsys.dll',
'provsvc.dll',
'provthrd.dll',
'psapi.dll',
'psbase.dll',
'pshed.dll',
'psisdecd.dll',
'pstorec.dll',
'pstorsvc.dll',
'puiapi.dll',
'puiobj.dll',
'pwrshplugin.dll',
'qagent.dll',
'qasf.dll',
'qcap.dll',
'qcliprov.dll',
'qdv.dll',
'qdvd.dll',
'qedit.dll',
'qedwipes.dll',
'qmgrprxy.dll',
'qshvhost.dll',
'qsvrmgmt.dll',
'quartz.dll',
'query.dll',
'qutil.dll',
'qwave.dll',
'racengn.dll',
'racpldlg.dll',
'radardt.dll',
'radarrs.dll',
'rasadhlp.dll',
'rasapi32.dll',
'rascfg.dll',
'raschap.dll',
'rasctrs.dll',
'rasdiag.dll',
'rasdlg.dll',
'rasgcw.dll',
'rasman.dll',
'rasmm.dll',
'rasmontr.dll',
'rasmxs.dll',
'rasplap.dll',
'rasppp.dll',
'rasser.dll',
'rastapi.dll',
'rastls.dll',
'rdpcore.dll',
'rdpd3d.dll',
'rdpencom.dll',
'rdpendp.dll',
'rdprefdrvapi.dll',
'rdvgumd32.dll',
'reagent.dll',
'regapi.dll',
'regctrl.dll',
'remotepg.dll',
'resampledmo.dll',
'resutils.dll',
'rgb9rast.dll',
'riched20.dll',
'riched32.dll',
'rnr20.dll',
'rpcdiag.dll',
'rpchttp.dll',
'rpcndfp.dll',
'rpcns4.dll',
'rpcnsh.dll',
'rpcrt4.dll',
'rpcrtremote.dll',
'rsaenh.dll',
'rshx32.dll',
'rstrtmgr.dll',
'rtffilt.dll',
'rtm.dll',
'rtutils.dll',
'samcli.dll',
'samlib.dll',
'sampleres.dll',
'sas.dll',
'sbe.dll',
'sbeio.dll',
'sberes.dll',
'scansetting.dll',
'scarddlg.dll',
'scecli.dll',
'scesrv.dll',
'schannel.dll',
'schedcli.dll',
'scksp.dll',
'scripto.dll',
'scrobj.dll',
'scrptadm.dll',
'scrrun.dll',
'sdiageng.dll',
'sdiagprv.dll',
'sdohlp.dll',
'searchfolder.dll',
'sechost.dll',
'secproc.dll',
'secproc_isv.dll',
'secproc_ssp.dll',
'secproc_ssp_isv.dll',
'secur32.dll',
'security.dll',
'sendmail.dll',
'sens.dll',
'sensapi.dll',
'sensorsapi.dll',
'sensorscpl.dll',
'serialui.dll',
'serwvdrv.dll',
'sessenv.dll',
'setupapi.dll',
'setupcln.dll',
'sfc.dll',
'sfc_os.dll',
'shacct.dll',
'shdocvw.dll',
'shell32.dll',
'shellstyle.dll',
'shfolder.dll',
'shgina.dll',
'shimeng.dll',
'shimgvw.dll',
'shlwapi.dll',
'shpafact.dll',
'shsetup.dll',
'shsvcs.dll',
'shunimpl.dll',
'shwebsvc.dll',
'signdrv.dll',
'sisbkup.dll',
'slc.dll',
'slcext.dll',
'slwga.dll',
'smartcardcredentialprovider.dll',
'smbhelperclass.dll',
'sndvolsso.dll',
'snmpapi.dll',
'softkbd.dll',
'softpub.dll',
'sortserver2003compat.dll',
'sortwindows6compat.dll',
'spbcd.dll',
'spfileq.dll',
'spinf.dll',
'spnet.dll',
'spopk.dll',
'spp.dll',
'sppc.dll',
'sppcc.dll',
'sppcext.dll',
'sppcomapi.dll',
'sppcommdlg.dll',
'sppinst.dll',
'sppwmi.dll',
'spwinsat.dll',
'spwizeng.dll',
'spwizimg.dll',
'spwizres.dll',
'spwmp.dll',
'sqlceoledb30.dll',
'sqlceqp30.dll',
'sqlcese30.dll',
'sqlsrv32.dll',
'sqlunirl.dll',
'sqlwid.dll',
'sqlwoa.dll',
'sqmapi.dll',
'srchadmin.dll',
'srclient.dll',
'srhelper.dll',
'srpuxnativesnapin.dll',
'srvcli.dll',
'sscore.dll',
'ssdpapi.dll',
'sspicli.dll',
'ssshim.dll',
'stclient.dll',
'sti.dll',
'stobject.dll',
'storage.dll',
'storagecontexthandler.dll',
'storprop.dll',
'structuredquery.dll',
'sud.dll',
'sxproxy.dll',
'sxs.dll',
'sxshared.dll',
'sxsstore.dll',
'synccenter.dll',
'synceng.dll',
'synchostps.dll',
'syncinfrastructure.dll',
'syncinfrastructureps.dll',
'syncreg.dll',
'syncui.dll',
'syssetup.dll',
'systemcpl.dll',
't2embed.dll',
'tapi3.dll',
'tapi32.dll',
'tapimigplugin.dll',
'tapiperf.dll',
'tapisrv.dll',
'tapisysprep.dll',
'tapiui.dll',
'taskcomp.dll',
'taskschd.dll',
'taskschdps.dll',
'tbs.dll',
'tcpipcfg.dll',
'tcpmonui.dll',
'tdh.dll',
'termmgr.dll',
'thawbrkr.dll',
'themecpl.dll',
'themeui.dll',
'thumbcache.dll',
'timedatemuicallback.dll',
'tlscsp.dll',
'tpmcompc.dll',
'tquery.dll',
'traffic.dll',
'trapi.dll',
'tsbyuv.dll',
'tschannel.dll',
'tsgqec.dll',
'tsmf.dll',
'tspkg.dll',
'tsworkspace.dll',
'tvratings.dll',
'twext.dll',
'txflog.dll',
'txfw32.dll',
'typelib.dll',
'tzres.dll',
'ubpm.dll',
'ucmhc.dll',
'ucrtbase.dll',
'udhisapi.dll',
'uexfat.dll',
'ufat.dll',
'uianimation.dll',
'uiautomationcore.dll',
'uicom.dll',
'uiribbon.dll',
'uiribbonres.dll',
'ulib.dll',
'umdmxfrm.dll',
'unimdmat.dll',
'uniplat.dll',
'untfs.dll',
'upnp.dll',
'upnphost.dll',
'ureg.dll',
'url.dll',
'urlmon.dll',
'usbceip.dll',
'usbperf.dll',
'usbui.dll',
'user32.dll',
'useraccountcontrolsettings.dll',
'usercpl.dll',
'userenv.dll',
'usp10.dll',
'utildll.dll',
'uudf.dll',
'uxinit.dll',
'uxlib.dll',
'uxlibres.dll',
'uxtheme.dll',
'van.dll',
'vault.dll',
'vaultcli.dll',
'vbajet32.dll',
'vbscript.dll',
'vdmdbg.dll',
'vdsbas.dll',
'vdsdyn.dll',
'vdsvd.dll',
'vds_ps.dll',
'verifier.dll',
'version.dll',
'vfpodbc.dll',
'vfwwdm32.dll',
'vidreszr.dll',
'virtdisk.dll',
'vpnikeapi.dll',
'vssapi.dll',
'vsstrace.dll',
'vss_ps.dll',
'w32topl.dll',
'wabsyncprovider.dll',
'wavemsp.dll',
'wbemcomn.dll',
'wcnapi.dll',
'wcncsvc.dll',
'wcneapauthproxy.dll',
'wcneappeerproxy.dll',
'wcnwiz.dll',
'wcspluginservice.dll',
'wdc.dll',
'wdi.dll',
'wdigest.dll',
'wdscore.dll',
'webcheck.dll',
'webclnt.dll',
'webio.dll',
'webservices.dll',
'wecapi.dll',
'wer.dll',
'werdiagcontroller.dll',
'werui.dll',
'wevtapi.dll',
'wevtfwd.dll',
'wfapigp.dll',
'wfhc.dll',
'whealogr.dll',
'whhelper.dll',
'wiaaut.dll',
'wiadefui.dll',
'wiadss.dll',
'wiaextensionhost64.dll',
'wiascanprofiles.dll',
'wiashext.dll',
'wiatrace.dll',
'wiavideo.dll',
'wimgapi.dll',
'win32spl.dll',
'winbio.dll',
'winbrand.dll',
'wincredprovider.dll',
'windowscodecs.dll',
'windowscodecsext.dll',
'winfax.dll',
'winhttp.dll',
'wininet.dll',
'winipsec.dll',
'winmm.dll',
'winnsi.dll',
'winrnr.dll',
'winrscmd.dll',
'winrsmgr.dll',
'winrssrv.dll',
'winsatapi.dll',
'winscard.dll',
'winshfhc.dll',
'winsockhc.dll',
'winsrpc.dll',
'winsta.dll',
'winsync.dll',
'winsyncmetastore.dll',
'winsyncproviders.dll',
'wintrust.dll',
'winusb.dll',
'wkscli.dll',
'wksprtps.dll',
'wlanapi.dll',
'wlancfg.dll',
'wlanconn.dll',
'wlandlg.dll',
'wlangpui.dll',
'wlanhlp.dll',
'wlaninst.dll',
'wlanmm.dll',
'wlanmsm.dll',
'wlanpref.dll',
'wlansec.dll',
'wlanui.dll',
'wlanutil.dll',
'wldap32.dll',
'wlgpclnt.dll',
'wls0wndh.dll',
'wmadmod.dll',
'wmadmoe.dll',
'wmasf.dll',
'wmcodecdspps.dll',
'wmdmlog.dll',
'wmdmps.dll',
'wmdrmdev.dll',
'wmdrmnet.dll',
'wmdrmsdk.dll',
'wmerror.dll',
'wmi.dll',
'wmidx.dll',
'wmiprop.dll',
'wmnetmgr.dll',
'wmp.dll',
'wmpcm.dll',
'wmpdui.dll',
'wmpdxm.dll',
'wmpeffects.dll',
'wmpencen.dll',
'wmphoto.dll',
'wmploc.dll',
'wmpmde.dll',
'wmpps.dll',
'wmpshell.dll',
'wmpsrcwp.dll',
'wmsgapi.dll',
'wmspdmod.dll',
'wmspdmoe.dll',
'wmvcore.dll',
'wmvdecod.dll',
'wmvdspa.dll',
'wmvencod.dll',
'wmvsdecd.dll',
'wmvsencd.dll',
'wmvxencd.dll',
'wow32.dll',
'wpc.dll',
'wpcao.dll',
'wpcsvc.dll',
'wpdshext.dll',
'wpdshserviceobj.dll',
'wpdsp.dll',
'wpdwcn.dll',
'ws2help.dll',
'ws2_32.dll',
'wscapi.dll',
'wscinterop.dll',
'wscisvif.dll',
'wscmisetup.dll',
'wscproxystub.dll',
'wsdapi.dll',
'wsdchngr.dll',
'wsecedit.dll',
'wshbth.dll',
'wshcon.dll',
'wshelper.dll',
'wshext.dll',
'wship6.dll',
'wshirda.dll',
'wshqos.dll',
'wshrm.dll',
'wshtcpip.dll',
'wsmanmigrationplugin.dll',
'wsmauto.dll',
'wsmplpxy.dll',
'wsmres.dll',
'wsmsvc.dll',
'wsmwmipl.dll',
'wsnmp32.dll',
'wsock32.dll',
'wtsapi32.dll',
'wuapi.dll',
'wudriver.dll',
'wups.dll',
'wuwebv.dll',
'wvc.dll',
'wwanapi.dll',
'wwapi.dll',
'wzcdlg.dll',
'xinput9_1_0.dll',
'xmlfilter.dll',
'xmllite.dll',
'xmlprovi.dll',
'xolehlp.dll',
'xpsfilt.dll',
'xpsgdiconverter.dll',
'xpsprint.dll',
'xpsrasterservice.dll',
'xpsservices.dll',
'xpsshhdr.dll',
'xpssvcs.dll',
'xwizards.dll',
'xwreg.dll',
'xwtpdui.dll',
'xwtpw32.dll',
'zipfldr.dll',
'msacm32.drv',
'wdmaud.drv',
'winspool.drv',
}
# ignore_names_64 is a set containing the lowercase names of all DLLs that can
# be assumed to be present on 64-bit Windows 7 or later. These are all the files
# with extension .dll or .drv found in C:\Windows\System32 on a vanilla Windows
# 7 Ultimate SP1 64-bit installation.
ignore_names_64 = {
'aaclient.dll',
'accessibilitycpl.dll',
'acctres.dll',
'acledit.dll',
'aclui.dll',
'acppage.dll',
'actioncenter.dll',
'actioncentercpl.dll',
'activeds.dll',
'actxprxy.dll',
'admtmpl.dll',
'adprovider.dll',
'adsldp.dll',
'adsldpc.dll',
'adsmsext.dll',
'adsnt.dll',
'adtschema.dll',
'advapi32.dll',
'advpack.dll',
'aecache.dll',
'aeevts.dll',
'alttab.dll',
'amstream.dll',
'amxread.dll',
'apds.dll',
'api-ms-win-core-file-l1-2-0.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-localization-l1-2-0.dll',
'api-ms-win-core-processthreads-l1-1-1.dll',
'api-ms-win-core-synch-l1-2-0.dll',
'api-ms-win-core-timezone-l1-1-0.dll',
'api-ms-win-core-xstate-l2-1-0.dll',
'api-ms-win-crt-conio-l1-1-0.dll',
'api-ms-win-crt-convert-l1-1-0.dll',
'api-ms-win-crt-environment-l1-1-0.dll',
'api-ms-win-crt-filesystem-l1-1-0.dll',
'api-ms-win-crt-heap-l1-1-0.dll',
'api-ms-win-crt-locale-l1-1-0.dll',
'api-ms-win-crt-math-l1-1-0.dll',
'api-ms-win-crt-multibyte-l1-1-0.dll',
'api-ms-win-crt-private-l1-1-0.dll',
'api-ms-win-crt-process-l1-1-0.dll',
'api-ms-win-crt-runtime-l1-1-0.dll',
'api-ms-win-crt-stdio-l1-1-0.dll',
'api-ms-win-crt-string-l1-1-0.dll',
'api-ms-win-crt-time-l1-1-0.dll',
'api-ms-win-crt-utility-l1-1-0.dll',
'apilogen.dll',
'apircl.dll',
'apisetschema.dll',
'apphelp.dll',
'apphlpdm.dll',
'appidapi.dll',
'appidpolicyengineapi.dll',
'appmgmts.dll',
'appmgr.dll',
'apss.dll',
'asferror.dll',
'aspnet_counters.dll',
'asycfilt.dll',
'atl.dll',
'atmfd.dll',
'atmlib.dll',
'audiodev.dll',
'audioeng.dll',
'audiokse.dll',
'audioses.dll',
'auditnativesnapin.dll',
'auditpolicygpinterop.dll',
'auditpolmsg.dll',
'authfwcfg.dll',
'authfwgp.dll',
'authfwsnapin.dll',
'authfwwizfwk.dll',
'authui.dll',
'authz.dll',
'autoplay.dll',
'auxiliarydisplayapi.dll',
'auxiliarydisplaycpl.dll',
'avicap32.dll',
'avifil32.dll',
'avrt.dll',
'azroles.dll',
'azroleui.dll',
'azsqlext.dll',
'basecsp.dll',
'batmeter.dll',
'bcrypt.dll',
'bcryptprimitives.dll',
'bidispl.dll',
'biocredprov.dll',
'bitsperf.dll',
'bitsprx2.dll',
'bitsprx3.dll',
'bitsprx4.dll',
'bitsprx5.dll',
'bitsprx6.dll',
'blackbox.dll',
'bootvid.dll',
'browcli.dll',
'browseui.dll',
'btpanui.dll',
'bwcontexthandler.dll',
'bwunpairelevated.dll',
'cabinet.dll',
'cabview.dll',
'capiprovider.dll',
'capisp.dll',
'catsrv.dll',
'catsrvps.dll',
'catsrvut.dll',
'cca.dll',
'cdosys.dll',
'certcli.dll',
'certcredprovider.dll',
'certenc.dll',
'certenroll.dll',
'certenrollui.dll',
'certmgr.dll',
'certpoleng.dll',
'cewmdm.dll',
'cfgbkend.dll',
'cfgmgr32.dll',
'chsbrkr.dll',
'chtbrkr.dll',
'chxreadingstringime.dll',
'cic.dll',
'clb.dll',
'clbcatq.dll',
'clfsw32.dll',
'cliconfg.dll',
'clusapi.dll',
'cmcfg32.dll',
'cmdial32.dll',
'cmicryptinstall.dll',
'cmifw.dll',
'cmipnpinstall.dll',
'cmlua.dll',
'cmpbk32.dll',
'cmstplua.dll',
'cmutil.dll',
'cngaudit.dll',
'cngprovider.dll',
'cnvfat.dll',
'colbact.dll',
'colorcnv.dll',
'colorui.dll',
'comcat.dll',
'comctl32.dll',
'comdlg32.dll',
'compobj.dll',
'compstui.dll',
'comrepl.dll',
'comres.dll',
'comsnap.dll',
'comsvcs.dll',
'comuid.dll',
'connect.dll',
'console.dll',
'cpfilters.dll',
'credssp.dll',
'credui.dll',
'crtdll.dll',
'crypt32.dll',
'cryptbase.dll',
'cryptdlg.dll',
'cryptdll.dll',
'cryptext.dll',
'cryptnet.dll',
'cryptsp.dll',
'cryptsvc.dll',
'cryptui.dll',
'cryptxml.dll',
'cscapi.dll',
'cscdll.dll',
'cscobj.dll',
'ctl3d32.dll',
'c_g18030.dll',
'c_is2022.dll',
'c_iscii.dll',
'd2d1.dll',
'd3d10.dll',
'd3d10core.dll',
'd3d10level9.dll',
'd3d10warp.dll',
'd3d10_1.dll',
'd3d10_1core.dll',
'd3d11.dll',
'd3d8.dll',
'd3d8thk.dll',
'd3d9.dll',
'd3dcompiler_47.dll',
'd3dim.dll',
'd3dim700.dll',
'd3dramp.dll',
'd3dxof.dll',
'dataclen.dll',
'davclnt.dll',
'davhlpr.dll',
'dbgeng.dll',
'dbghelp.dll',
'dbnetlib.dll',
'dbnmpntw.dll',
'dciman32.dll',
'ddaclsys.dll',
'ddoiproxy.dll',
'ddores.dll',
'ddraw.dll',
'ddrawex.dll',
'defaultlocationcpl.dll',
'deskadp.dll',
'deskmon.dll',
'deskperf.dll',
'devenum.dll',
'devicecenter.dll',
'devicedisplaystatusmanager.dll',
'devicemetadataparsers.dll',
'devicepairing.dll',
'devicepairingfolder.dll',
'devicepairinghandler.dll',
'devicepairingproxy.dll',
'deviceuxres.dll',
'devmgr.dll',
'devobj.dll',
'devrtl.dll',
'dfscli.dll',
'dfshim.dll',
'dfsshlex.dll',
'dhcpcmonitor.dll',
'dhcpcore.dll',
'dhcpcore6.dll',
'dhcpcsvc.dll',
'dhcpcsvc6.dll',
'dhcpqec.dll',
'dhcpsapi.dll',
'difxapi.dll',
'dimsjob.dll',
'dimsroam.dll',
'dinput.dll',
'dinput8.dll',
'diskcopy.dll',
'dispex.dll',
'display.dll',
'dmband.dll',
'dmcompos.dll',
'dmdlgs.dll',
'dmdskmgr.dll',
'dmdskres.dll',
'dmdskres2.dll',
'dmime.dll',
'dmintf.dll',
'dmloader.dll',
'dmocx.dll',
'dmrc.dll',
'dmscript.dll',
'dmstyle.dll',
'dmsynth.dll',
'dmusic.dll',
'dmutil.dll',
'dmvdsitf.dll',
'dnsapi.dll',
'dnscmmc.dll',
'docprop.dll',
'dot3api.dll',
'dot3cfg.dll',
'dot3dlg.dll',
'dot3gpclnt.dll',
'dot3gpui.dll',
'dot3hc.dll',
'dot3msm.dll',
'dot3ui.dll',
'dpapiprovider.dll',
'dplayx.dll',
'dpmodemx.dll',
'dpnaddr.dll',
'dpnathlp.dll',
'dpnet.dll',
'dpnhpast.dll',
'dpnhupnp.dll',
'dpnlobby.dll',
'dpwsockx.dll',
'dpx.dll',
'drmmgrtn.dll',
'drmv2clt.dll',
'drprov.dll',
'drt.dll',
'drtprov.dll',
'drttransport.dll',
'drvstore.dll',
'ds32gt.dll',
'dsauth.dll',
'dsdmo.dll',
'dshowrdpfilter.dll',
'dskquota.dll',
'dskquoui.dll',
'dsound.dll',
'dsprop.dll',
'dsquery.dll',
'dsrole.dll',
'dssec.dll',
'dssenh.dll',
'dsuiext.dll',
'dswave.dll',
'dtsh.dll',
'dui70.dll',
'duser.dll',
'dwmapi.dll',
'dwmcore.dll',
'dwrite.dll',
'dxdiagn.dll',
'dxgi.dll',
'dxmasf.dll',
'dxptaskringtone.dll',
'dxptasksync.dll',
'dxtmsft.dll',
'dxtrans.dll',
'dxva2.dll',
'eapp3hst.dll',
'eappcfg.dll',
'eappgnui.dll',
'eapphost.dll',
'eappprxy.dll',
'eapqec.dll',
'efsadu.dll',
'efscore.dll',
'efsutil.dll',
'ehstorapi.dll',
'ehstorpwdmgr.dll',
'ehstorshell.dll',
'els.dll',
'elscore.dll',
'elshyph.dll',
'elslad.dll',
'elstrans.dll',
'encapi.dll',
'encdec.dll',
'eqossnap.dll',
'es.dll',
'esent.dll',
'esentprf.dll',
'eventcls.dll',
'evr.dll',
'explorerframe.dll',
'expsrv.dll',
'f3ahvoas.dll',
'faultrep.dll',
'fdbth.dll',
'fdbthproxy.dll',
'fde.dll',
'fdeploy.dll',
'fdpnp.dll',
'fdproxy.dll',
'fdssdp.dll',
'fdwcn.dll',
'fdwnet.dll',
'fdwsd.dll',
'feclient.dll',
'filemgmt.dll',
'findnetprinters.dll',
'firewallapi.dll',
'firewallcontrolpanel.dll',
'fltlib.dll',
'fmifs.dll',
'fms.dll',
'fontext.dll',
'fontsub.dll',
'fphc.dll',
'framedyn.dll',
'framedynos.dll',
'fthsvc.dll',
'fundisc.dll',
'fwcfg.dll',
'fwpuclnt.dll',
'fwremotesvr.dll',
'fxsapi.dll',
'fxscom.dll',
'fxscomex.dll',
'fxsext32.dll',
'fxsresm.dll',
'fxsxp32.dll',
'gameux.dll',
'gameuxlegacygdfs.dll',
'gcdef.dll',
'gdi32.dll',
'getuname.dll',
'glmf32.dll',
'glu32.dll',
'gpapi.dll',
'gpedit.dll',
'gpprefcl.dll',
'gpprnext.dll',
'gpscript.dll',
'gptext.dll',
'hbaapi.dll',
'hcproviders.dll',
'helppaneproxy.dll',
'hgcpl.dll',
'hhsetup.dll',
'hid.dll',
'hidserv.dll',
'hlink.dll',
'hnetcfg.dll',
'hnetmon.dll',
'httpapi.dll',
'htui.dll',
'ias.dll',
'iasacct.dll',
'iasads.dll',
'iasdatastore.dll',
'iashlpr.dll',
'iasmigplugin.dll',
'iasnap.dll',
'iaspolcy.dll',
'iasrad.dll',
'iasrecst.dll',
'iassam.dll',
'iassdo.dll',
'iassvcs.dll',
'icardie.dll',
'icardres.dll',
'iccvid.dll',
'icm32.dll',
'icmp.dll',
'icmui.dll',
'iconcodecservice.dll',
'icsigd.dll',
'idndl.dll',
'idstore.dll',
'ieadvpack.dll',
'ieapfltr.dll',
'iedkcs32.dll',
'ieetwproxystub.dll',
'ieframe.dll',
'iepeers.dll',
'iernonce.dll',
'iertutil.dll',
'iesetup.dll',
'iesysprep.dll',
'ieui.dll',
'ifmon.dll',
'ifsutil.dll',
'ifsutilx.dll',
'imagehlp.dll',
'imageres.dll',
'imagesp1.dll',
'imapi.dll',
'imapi2.dll',
'imapi2fs.dll',
'imgutil.dll',
'imjp10k.dll',
'imm32.dll',
'inetcomm.dll',
'inetmib1.dll',
'inetres.dll',
'infocardapi.dll',
'inked.dll',
'input.dll',
'inseng.dll',
'iologmsg.dll',
'ipbusenumproxy.dll',
'iphlpapi.dll',
'iprop.dll',
'iprtprio.dll',
'iprtrmgr.dll',
'ipsecsnp.dll',
'ipsmsnap.dll',
'ir32_32.dll',
'ir41_qc.dll',
'ir41_qcx.dll',
'ir50_32.dll',
'ir50_qc.dll',
'ir50_qcx.dll',
'irclass.dll',
'iscsicpl.dll',
'iscsidsc.dll',
'iscsied.dll',
'iscsium.dll',
'iscsiwmi.dll',
'itircl.dll',
'itss.dll',
'itvdata.dll',
'iyuv_32.dll',
'javascriptcollectionagent.dll',
'jscript.dll',
'jscript9.dll',
'jscript9diag.dll',
'jsintl.dll',
'jsproxy.dll',
'kbd101.dll',
'kbd101a.dll',
'kbd101b.dll',
'kbd101c.dll',
'kbd103.dll',
'kbd106.dll',
'kbd106n.dll',
'kbda1.dll',
'kbda2.dll',
'kbda3.dll',
'kbdal.dll',
'kbdarme.dll',
'kbdarmw.dll',
'kbdax2.dll',
'kbdaze.dll',
'kbdazel.dll',
'kbdbash.dll',
'kbdbe.dll',
'kbdbene.dll',
'kbdbgph.dll',
'kbdbgph1.dll',
'kbdbhc.dll',
'kbdblr.dll',
'kbdbr.dll',
'kbdbu.dll',
'kbdbulg.dll',
'kbdca.dll',
'kbdcan.dll',
'kbdcr.dll',
'kbdcz.dll',
'kbdcz1.dll',
'kbdcz2.dll',
'kbdda.dll',
'kbddiv1.dll',
'kbddiv2.dll',
'kbddv.dll',
'kbdes.dll',
'kbdest.dll',
'kbdfa.dll',
'kbdfc.dll',
'kbdfi.dll',
'kbdfi1.dll',
'kbdfo.dll',
'kbdfr.dll',
'kbdgae.dll',
'kbdgeo.dll',
'kbdgeoer.dll',
'kbdgeoqw.dll',
'kbdgkl.dll',
'kbdgr.dll',
'kbdgr1.dll',
'kbdgrlnd.dll',
'kbdhau.dll',
'kbdhe.dll',
'kbdhe220.dll',
'kbdhe319.dll',
'kbdheb.dll',
'kbdhela2.dll',
'kbdhela3.dll',
'kbdhept.dll',
'kbdhu.dll',
'kbdhu1.dll',
'kbdibm02.dll',
'kbdibo.dll',
'kbdic.dll',
'kbdinasa.dll',
'kbdinbe1.dll',
'kbdinbe2.dll',
'kbdinben.dll',
'kbdindev.dll',
'kbdinguj.dll',
'kbdinhin.dll',
'kbdinkan.dll',
'kbdinmal.dll',
'kbdinmar.dll',
'kbdinori.dll',
'kbdinpun.dll',
'kbdintam.dll',
'kbdintel.dll',
'kbdinuk2.dll',
'kbdir.dll',
'kbdit.dll',
'kbdit142.dll',
'kbdiulat.dll',
'kbdjpn.dll',
'kbdkaz.dll',
'kbdkhmr.dll',
'kbdkor.dll',
'kbdkyr.dll',
'kbdla.dll',
'kbdlao.dll',
'kbdlk41a.dll',
'kbdlt.dll',
'kbdlt1.dll',
'kbdlt2.dll',
'kbdlv.dll',
'kbdlv1.dll',
'kbdmac.dll',
'kbdmacst.dll',
'kbdmaori.dll',
'kbdmlt47.dll',
'kbdmlt48.dll',
'kbdmon.dll',
'kbdmonmo.dll',
'kbdne.dll',
'kbdnec.dll',
'kbdnec95.dll',
'kbdnecat.dll',
'kbdnecnt.dll',
'kbdnepr.dll',
'kbdno.dll',
'kbdno1.dll',
'kbdnso.dll',
'kbdpash.dll',
'kbdpl.dll',
'kbdpl1.dll',
'kbdpo.dll',
'kbdro.dll',
'kbdropr.dll',
'kbdrost.dll',
'kbdru.dll',
'kbdru1.dll',
'kbdsf.dll',
'kbdsg.dll',
'kbdsl.dll',
'kbdsl1.dll',
'kbdsmsfi.dll',
'kbdsmsno.dll',
'kbdsn1.dll',
'kbdsorex.dll',
'kbdsors1.dll',
'kbdsorst.dll',
'kbdsp.dll',
'kbdsw.dll',
'kbdsw09.dll',
'kbdsyr1.dll',
'kbdsyr2.dll',
'kbdtajik.dll',
'kbdtat.dll',
'kbdth0.dll',
'kbdth1.dll',
'kbdth2.dll',
'kbdth3.dll',
'kbdtiprc.dll',
'kbdtuf.dll',
'kbdtuq.dll',
'kbdturme.dll',
'kbdughr.dll',
'kbdughr1.dll',
'kbduk.dll',
'kbdukx.dll',
'kbdur.dll',
'kbdur1.dll',
'kbdurdu.dll',
'kbdus.dll',
'kbdusa.dll',
'kbdusl.dll',
'kbdusr.dll',
'kbdusx.dll',
'kbduzb.dll',
'kbdvntc.dll',
'kbdwol.dll',
'kbdyak.dll',
'kbdyba.dll',
'kbdycc.dll',
'kbdycl.dll',
'kerberos.dll',
'kernel32.dll',
'kernelbase.dll',
'keyiso.dll',
'keymgr.dll',
'korwbrkr.dll',
'ksuser.dll',
'ktmw32.dll',
'l2gpstore.dll',
'l2nacp.dll',
'l2sechc.dll',
'laprxy.dll',
'licmgr10.dll',
'linkinfo.dll',
'loadperf.dll',
'localsec.dll',
'locationapi.dll',
'loghours.dll',
'logoncli.dll',
'lpk.dll',
'lsmproxy.dll',
'luainstall.dll',
'lz32.dll',
'magnification.dll',
'mapi32.dll',
'mapistub.dll',
'mcewmdrmndbootstrap.dll',
'mciavi32.dll',
'mcicda.dll',
'mciqtz32.dll',
'mciseq.dll',
'mciwave.dll',
'mctres.dll',
'mdminst.dll',
'mediametadatahandler.dll',
'mf.dll',
'mf3216.dll',
'mfaacenc.dll',
'mfc40.dll',
'mfc40u.dll',
'mfc42.dll',
'mfc42u.dll',
'mfcsubs.dll',
'mfds.dll',
'mfdvdec.dll',
'mferror.dll',
'mfh264enc.dll',
'mfmjpegdec.dll',
'mfplat.dll',
'mfplay.dll',
'mfps.dll',
'mfreadwrite.dll',
'mfvdsp.dll',
'mfwmaaec.dll',
'mgmtapi.dll',
'midimap.dll',
'migisol.dll',
'miguiresource.dll',
'mimefilt.dll',
'mlang.dll',
'mmcbase.dll',
'mmci.dll',
'mmcico.dll',
'mmcndmgr.dll',
'mmcshext.dll',
'mmdevapi.dll',
'mmres.dll',
'modemui.dll',
'moricons.dll',
'mp3dmod.dll',
'mp43decd.dll',
'mp4sdecd.dll',
'mpg4decd.dll',
'mpr.dll',
'mprapi.dll',
'mprddm.dll',
'mprdim.dll',
'mprmsg.dll',
'msaatext.dll',
'msac3enc.dll',
'msacm32.dll',
'msafd.dll',
'msasn1.dll',
'msaudite.dll',
'mscandui.dll',
'mscat32.dll',
'msclmd.dll',
'mscms.dll',
'mscoree.dll',
'mscorier.dll',
'mscories.dll',
'mscpx32r.dll',
'mscpxl32.dll',
'msctf.dll',
'msctfmonitor.dll',
'msctfp.dll',
'msctfui.dll',
'msdadiag.dll',
'msdart.dll',
'msdelta.dll',
'msdmo.dll',
'msdrm.dll',
'msdtcprx.dll',
'msdtcuiu.dll',
'msdtcvsp1res.dll',
'msexch40.dll',
'msexcl40.dll',
'msfeeds.dll',
'msfeedsbs.dll',
'msftedit.dll',
'mshtml.dll',
'mshtmldac.dll',
'mshtmled.dll',
'mshtmler.dll',
'mshtmlmedia.dll',
'msi.dll',
'msidcrl30.dll',
'msident.dll',
'msidle.dll',
'msidntld.dll',
'msieftp.dll',
'msihnd.dll',
'msiltcfg.dll',
'msimg32.dll',
'msimsg.dll',
'msimtf.dll',
'msisip.dll',
'msjet40.dll',
'msjetoledb40.dll',
'msjint40.dll',
'msjter40.dll',
'msjtes40.dll',
'msls31.dll',
'msltus40.dll',
'msmpeg2adec.dll',
'msmpeg2enc.dll',
'msmpeg2vdec.dll',
'msnetobj.dll',
'msobjs.dll',
'msoeacct.dll',
'msoert2.dll',
'msorc32r.dll',
'msorcl32.dll',
'mspatcha.dll',
'mspbde40.dll',
'msports.dll',
'msrating.dll',
'msrd2x40.dll',
'msrd3x40.dll',
'msrdc.dll',
'msrdpwebaccess.dll',
'msrepl40.dll',
'msrle32.dll',
'msscntrs.dll',
'msscp.dll',
'mssha.dll',
'msshavmsg.dll',
'msshooks.dll',
'mssign32.dll',
'mssip32.dll',
'mssitlb.dll',
'mssph.dll',
'mssphtb.dll',
'mssprxy.dll',
'mssrch.dll',
'mssvp.dll',
'msswch.dll',
'mstask.dll',
'mstext40.dll',
'mstscax.dll',
'msutb.dll',
'msv1_0.dll',
'msvbvm60.dll',
'msvcirt.dll',
'msvcp110_clr0400.dll',
'msvcp120_clr0400.dll',
'msvcp60.dll',
'msvcr100_clr0400.dll',
'msvcr110_clr0400.dll',
'msvcr120_clr0400.dll',
'msvcrt.dll',
'msvcrt20.dll',
'msvcrt40.dll',
'msvfw32.dll',
'msvidc32.dll',
'msvidctl.dll',
'mswdat10.dll',
'mswmdm.dll',
'mswsock.dll',
'mswstr10.dll',
'msxbde40.dll',
'msxml3.dll',
'msxml3r.dll',
'msxml6.dll',
'msxml6r.dll',
'msyuv.dll',
'mtxclu.dll',
'mtxdm.dll',
'mtxex.dll',
'mtxlegih.dll',
'mtxoci.dll',
'muifontsetup.dll',
'mycomput.dll',
'mydocs.dll',
'napcrypt.dll',
'napdsnap.dll',
'naphlpr.dll',
'napinsp.dll',
'napipsec.dll',
'napmontr.dll',
'nativehooks.dll',
'naturallanguage6.dll',
'ncdprop.dll',
'nci.dll',
'ncobjapi.dll',
'ncrypt.dll',
'ncryptui.dll',
'ncsi.dll',
'nddeapi.dll',
'ndfapi.dll',
'ndfetw.dll',
'ndfhcdiscovery.dll',
'ndiscapcfg.dll',
'ndishc.dll',
'ndproxystub.dll',
'negoexts.dll',
'netapi32.dll',
'netbios.dll',
'netcenter.dll',
'netcfgx.dll',
'netcorehc.dll',
'netdiagfx.dll',
'netevent.dll',
'netfxperf.dll',
'neth.dll',
'netid.dll',
'netiohlp.dll',
'netjoin.dll',
'netlogon.dll',
'netmsg.dll',
'netplwiz.dll',
'netprof.dll',
'netprofm.dll',
'netshell.dll',
'netutils.dll',
'networkexplorer.dll',
'networkitemfactory.dll',
'networkmap.dll',
'newdev.dll',
'nlaapi.dll',
'nlhtml.dll',
'nlmgp.dll',
'nlmsprep.dll',
'nlsbres.dll',
'nlsdata0000.dll',
'nlsdata0001.dll',
'nlsdata0002.dll',
'nlsdata0003.dll',
'nlsdata0007.dll',
'nlsdata0009.dll',
'nlsdata000a.dll',
'nlsdata000c.dll',
'nlsdata000d.dll',
'nlsdata000f.dll',
'nlsdata0010.dll',
'nlsdata0011.dll',
'nlsdata0013.dll',
'nlsdata0018.dll',
'nlsdata0019.dll',
'nlsdata001a.dll',
'nlsdata001b.dll',
'nlsdata001d.dll',
'nlsdata0020.dll',
'nlsdata0021.dll',
'nlsdata0022.dll',
'nlsdata0024.dll',
'nlsdata0026.dll',
'nlsdata0027.dll',
'nlsdata002a.dll',
'nlsdata0039.dll',
'nlsdata003e.dll',
'nlsdata0045.dll',
'nlsdata0046.dll',
'nlsdata0047.dll',
'nlsdata0049.dll',
'nlsdata004a.dll',
'nlsdata004b.dll',
'nlsdata004c.dll',
'nlsdata004e.dll',
'nlsdata0414.dll',
'nlsdata0416.dll',
'nlsdata0816.dll',
'nlsdata081a.dll',
'nlsdata0c1a.dll',
'nlsdl.dll',
'nlslexicons0001.dll',
'nlslexicons0002.dll',
'nlslexicons0003.dll',
'nlslexicons0007.dll',
'nlslexicons0009.dll',
'nlslexicons000a.dll',
'nlslexicons000c.dll',
'nlslexicons000d.dll',
'nlslexicons000f.dll',
'nlslexicons0010.dll',
'nlslexicons0011.dll',
'nlslexicons0013.dll',
'nlslexicons0018.dll',
'nlslexicons0019.dll',
'nlslexicons001a.dll',
'nlslexicons001b.dll',
'nlslexicons001d.dll',
'nlslexicons0020.dll',
'nlslexicons0021.dll',
'nlslexicons0022.dll',
'nlslexicons0024.dll',
'nlslexicons0026.dll',
'nlslexicons0027.dll',
'nlslexicons002a.dll',
'nlslexicons0039.dll',
'nlslexicons003e.dll',
'nlslexicons0045.dll',
'nlslexicons0046.dll',
'nlslexicons0047.dll',
'nlslexicons0049.dll',
'nlslexicons004a.dll',
'nlslexicons004b.dll',
'nlslexicons004c.dll',
'nlslexicons004e.dll',
'nlslexicons0414.dll',
'nlslexicons0416.dll',
'nlslexicons0816.dll',
'nlslexicons081a.dll',
'nlslexicons0c1a.dll',
'nlsmodels0011.dll',
'normaliz.dll',
'npmproxy.dll',
'nshhttp.dll',
'nshipsec.dll',
'nshwfp.dll',
'nsi.dll',
'ntdll.dll',
'ntdsapi.dll',
'ntlanman.dll',
'ntlanui2.dll',
'ntmarta.dll',
'ntprint.dll',
'ntshrui.dll',
'ntvdm64.dll',
'objsel.dll',
'occache.dll',
'ocsetapi.dll',
'odbc32.dll',
'odbc32gt.dll',
'odbcbcp.dll',
'odbcconf.dll',
'odbccp32.dll',
'odbccr32.dll',
'odbccu32.dll',
'odbcint.dll',
'odbcji32.dll',
'odbcjt32.dll',
'odbctrac.dll',
'oddbse32.dll',
'odexl32.dll',
'odfox32.dll',
'odpdx32.dll',
'odtext32.dll',
'offfilt.dll',
'ogldrv.dll',
'ole2.dll',
'ole2disp.dll',
'ole2nls.dll',
'ole32.dll',
'oleacc.dll',
'oleacchooks.dll',
'oleaccrc.dll',
'oleaut32.dll',
'olecli32.dll',
'oledlg.dll',
'oleprn.dll',
'olepro32.dll',
'oleres.dll',
'olesvr32.dll',
'olethk32.dll',
'onex.dll',
'onexui.dll',
'onlineidcpl.dll',
'oobefldr.dll',
'opcservices.dll',
'opengl32.dll',
'osbaseln.dll',
'osuninst.dll',
'p2p.dll',
'p2pcollab.dll',
'p2pgraph.dll',
'p2pnetsh.dll',
'packager.dll',
'panmap.dll',
'pautoenr.dll',
'pcaui.dll',
'pcwum.dll',
'pdh.dll',
'pdhui.dll',
'peerdist.dll',
'peerdistsh.dll',
'perfcentercpl.dll',
'perfctrs.dll',
'perfdisk.dll',
'perfnet.dll',
'perfos.dll',
'perfproc.dll',
'perfts.dll',
'photometadatahandler.dll',
'photowiz.dll',
'pid.dll',
'pidgenx.dll',
'pifmgr.dll',
'pku2u.dll',
'pla.dll',
'playsndsrv.dll',
'pmcsnap.dll',
'pngfilt.dll',
'pnidui.dll',
'pnpsetup.dll',
'pnrpnsp.dll',
'polstore.dll',
'portabledeviceapi.dll',
'portabledeviceclassextension.dll',
'portabledeviceconnectapi.dll',
'portabledevicestatus.dll',
'portabledevicesyncprovider.dll',
'portabledevicetypes.dll',
'portabledevicewiacompat.dll',
'portabledevicewmdrm.dll',
'pots.dll',
'powercpl.dll',
'powrprof.dll',
'ppcsnap.dll',
'presentationcffrasterizernative_v0300.dll',
'presentationhostproxy.dll',
'presentationnative_v0300.dll',
'prflbmsg.dll',
'printui.dll',
'prncache.dll',
'prnfldr.dll',
'prnntfy.dll',
'prntvpt.dll',
'profapi.dll',
'propsys.dll',
'provsvc.dll',
'provthrd.dll',
'psapi.dll',
'psbase.dll',
'pshed.dll',
'psisdecd.dll',
'pstorec.dll',
'pstorsvc.dll',
'puiapi.dll',
'puiobj.dll',
'pwrshplugin.dll',
'qagent.dll',
'qasf.dll',
'qcap.dll',
'qcliprov.dll',
'qdv.dll',
'qdvd.dll',
'qedit.dll',
'qedwipes.dll',
'qmgrprxy.dll',
'qshvhost.dll',
'qsvrmgmt.dll',
'quartz.dll',
'query.dll',
'qutil.dll',
'qwave.dll',
'racengn.dll',
'racpldlg.dll',
'radardt.dll',
'radarrs.dll',
'rasadhlp.dll',
'rasapi32.dll',
'rascfg.dll',
'raschap.dll',
'rasctrs.dll',
'rasdiag.dll',
'rasdlg.dll',
'rasgcw.dll',
'rasman.dll',
'rasmm.dll',
'rasmontr.dll',
'rasmxs.dll',
'rasplap.dll',
'rasppp.dll',
'rasser.dll',
'rastapi.dll',
'rastls.dll',
'rdpcore.dll',
'rdpd3d.dll',
'rdpencom.dll',
'rdpendp.dll',
'rdprefdrvapi.dll',
'rdvgumd32.dll',
'reagent.dll',
'regapi.dll',
'regctrl.dll',
'remotepg.dll',
'resampledmo.dll',
'resutils.dll',
'rgb9rast.dll',
'riched20.dll',
'riched32.dll',
'rnr20.dll',
'rpcdiag.dll',
'rpchttp.dll',
'rpcndfp.dll',
'rpcns4.dll',
'rpcnsh.dll',
'rpcrt4.dll',
'rpcrtremote.dll',
'rsaenh.dll',
'rshx32.dll',
'rstrtmgr.dll',
'rtffilt.dll',
'rtm.dll',
'rtutils.dll',
'samcli.dll',
'samlib.dll',
'sampleres.dll',
'sas.dll',
'sbe.dll',
'sbeio.dll',
'sberes.dll',
'scansetting.dll',
'scarddlg.dll',
'scecli.dll',
'scesrv.dll',
'schannel.dll',
'schedcli.dll',
'scksp.dll',
'scripto.dll',
'scrobj.dll',
'scrptadm.dll',
'scrrun.dll',
'sdiageng.dll',
'sdiagprv.dll',
'sdohlp.dll',
'searchfolder.dll',
'sechost.dll',
'secproc.dll',
'secproc_isv.dll',
'secproc_ssp.dll',
'secproc_ssp_isv.dll',
'secur32.dll',
'security.dll',
'sendmail.dll',
'sens.dll',
'sensapi.dll',
'sensorsapi.dll',
'sensorscpl.dll',
'serialui.dll',
'serwvdrv.dll',
'sessenv.dll',
'setupapi.dll',
'setupcln.dll',
'sfc.dll',
'sfc_os.dll',
'shacct.dll',
'shdocvw.dll',
'shell32.dll',
'shellstyle.dll',
'shfolder.dll',
'shgina.dll',
'shimeng.dll',
'shimgvw.dll',
'shlwapi.dll',
'shpafact.dll',
'shsetup.dll',
'shsvcs.dll',
'shunimpl.dll',
'shwebsvc.dll',
'signdrv.dll',
'sisbkup.dll',
'slc.dll',
'slcext.dll',
'slwga.dll',
'smartcardcredentialprovider.dll',
'smbhelperclass.dll',
'sndvolsso.dll',
'snmpapi.dll',
'softkbd.dll',
'softpub.dll',
'sortserver2003compat.dll',
'sortwindows6compat.dll',
'spbcd.dll',
'spfileq.dll',
'spinf.dll',
'spnet.dll',
'spopk.dll',
'spp.dll',
'sppc.dll',
'sppcc.dll',
'sppcext.dll',
'sppcomapi.dll',
'sppcommdlg.dll',
'sppinst.dll',
'sppwmi.dll',
'spwinsat.dll',
'spwizeng.dll',
'spwizimg.dll',
'spwizres.dll',
'spwmp.dll',
'sqlceoledb30.dll',
'sqlceqp30.dll',
'sqlcese30.dll',
'sqlsrv32.dll',
'sqlunirl.dll',
'sqlwid.dll',
'sqlwoa.dll',
'sqmapi.dll',
'srchadmin.dll',
'srclient.dll',
'srhelper.dll',
'srpuxnativesnapin.dll',
'srvcli.dll',
'sscore.dll',
'ssdpapi.dll',
'sspicli.dll',
'ssshim.dll',
'stclient.dll',
'sti.dll',
'stobject.dll',
'storage.dll',
'storagecontexthandler.dll',
'storprop.dll',
'structuredquery.dll',
'sud.dll',
'sxproxy.dll',
'sxs.dll',
'sxshared.dll',
'sxsstore.dll',
'synccenter.dll',
'synceng.dll',
'synchostps.dll',
'syncinfrastructure.dll',
'syncinfrastructureps.dll',
'syncreg.dll',
'syncui.dll',
'syssetup.dll',
'systemcpl.dll',
't2embed.dll',
'tapi3.dll',
'tapi32.dll',
'tapimigplugin.dll',
'tapiperf.dll',
'tapisrv.dll',
'tapisysprep.dll',
'tapiui.dll',
'taskcomp.dll',
'taskschd.dll',
'taskschdps.dll',
'tbs.dll',
'tcpipcfg.dll',
'tcpmonui.dll',
'tdh.dll',
'termmgr.dll',
'thawbrkr.dll',
'themecpl.dll',
'themeui.dll',
'thumbcache.dll',
'timedatemuicallback.dll',
'tlscsp.dll',
'tpmcompc.dll',
'tquery.dll',
'traffic.dll',
'trapi.dll',
'tsbyuv.dll',
'tschannel.dll',
'tsgqec.dll',
'tsmf.dll',
'tspkg.dll',
'tsworkspace.dll',
'tvratings.dll',
'twext.dll',
'txflog.dll',
'txfw32.dll',
'typelib.dll',
'tzres.dll',
'ubpm.dll',
'ucmhc.dll',
'ucrtbase.dll',
'udhisapi.dll',
'uexfat.dll',
'ufat.dll',
'uianimation.dll',
'uiautomationcore.dll',
'uicom.dll',
'uiribbon.dll',
'uiribbonres.dll',
'ulib.dll',
'umdmxfrm.dll',
'unimdmat.dll',
'uniplat.dll',
'untfs.dll',
'upnp.dll',
'upnphost.dll',
'ureg.dll',
'url.dll',
'urlmon.dll',
'usbceip.dll',
'usbperf.dll',
'usbui.dll',
'user32.dll',
'useraccountcontrolsettings.dll',
'usercpl.dll',
'userenv.dll',
'usp10.dll',
'utildll.dll',
'uudf.dll',
'uxinit.dll',
'uxlib.dll',
'uxlibres.dll',
'uxtheme.dll',
'van.dll',
'vault.dll',
'vaultcli.dll',
'vbajet32.dll',
'vbscript.dll',
'vdmdbg.dll',
'vdsbas.dll',
'vdsdyn.dll',
'vdsvd.dll',
'vds_ps.dll',
'verifier.dll',
'version.dll',
'vfpodbc.dll',
'vfwwdm32.dll',
'vidreszr.dll',
'virtdisk.dll',
'vpnikeapi.dll',
'vssapi.dll',
'vsstrace.dll',
'vss_ps.dll',
'w32topl.dll',
'wabsyncprovider.dll',
'wavemsp.dll',
'wbemcomn.dll',
'wcnapi.dll',
'wcncsvc.dll',
'wcneapauthproxy.dll',
'wcneappeerproxy.dll',
'wcnwiz.dll',
'wcspluginservice.dll',
'wdc.dll',
'wdi.dll',
'wdigest.dll',
'wdscore.dll',
'webcheck.dll',
'webclnt.dll',
'webio.dll',
'webservices.dll',
'wecapi.dll',
'wer.dll',
'werdiagcontroller.dll',
'werui.dll',
'wevtapi.dll',
'wevtfwd.dll',
'wfapigp.dll',
'wfhc.dll',
'whealogr.dll',
'whhelper.dll',
'wiaaut.dll',
'wiadefui.dll',
'wiadss.dll',
'wiaextensionhost64.dll',
'wiascanprofiles.dll',
'wiashext.dll',
'wiatrace.dll',
'wiavideo.dll',
'wimgapi.dll',
'win32spl.dll',
'winbio.dll',
'winbrand.dll',
'wincredprovider.dll',
'windowscodecs.dll',
'windowscodecsext.dll',
'winfax.dll',
'winhttp.dll',
'wininet.dll',
'winipsec.dll',
'winmm.dll',
'winnsi.dll',
'winrnr.dll',
'winrscmd.dll',
'winrsmgr.dll',
'winrssrv.dll',
'winsatapi.dll',
'winscard.dll',
'winshfhc.dll',
'winsockhc.dll',
'winsrpc.dll',
'winsta.dll',
'winsync.dll',
'winsyncmetastore.dll',
'winsyncproviders.dll',
'wintrust.dll',
'winusb.dll',
'wkscli.dll',
'wksprtps.dll',
'wlanapi.dll',
'wlancfg.dll',
'wlanconn.dll',
'wlandlg.dll',
'wlangpui.dll',
'wlanhlp.dll',
'wlaninst.dll',
'wlanmm.dll',
'wlanmsm.dll',
'wlanpref.dll',
'wlansec.dll',
'wlanui.dll',
'wlanutil.dll',
'wldap32.dll',
'wlgpclnt.dll',
'wls0wndh.dll',
'wmadmod.dll',
'wmadmoe.dll',
'wmasf.dll',
'wmcodecdspps.dll',
'wmdmlog.dll',
'wmdmps.dll',
'wmdrmdev.dll',
'wmdrmnet.dll',
'wmdrmsdk.dll',
'wmerror.dll',
'wmi.dll',
'wmidx.dll',
'wmiprop.dll',
'wmnetmgr.dll',
'wmp.dll',
'wmpcm.dll',
'wmpdui.dll',
'wmpdxm.dll',
'wmpeffects.dll',
'wmpencen.dll',
'wmphoto.dll',
'wmploc.dll',
'wmpmde.dll',
'wmpps.dll',
'wmpshell.dll',
'wmpsrcwp.dll',
'wmsgapi.dll',
'wmspdmod.dll',
'wmspdmoe.dll',
'wmvcore.dll',
'wmvdecod.dll',
'wmvdspa.dll',
'wmvencod.dll',
'wmvsdecd.dll',
'wmvsencd.dll',
'wmvxencd.dll',
'wow32.dll',
'wpc.dll',
'wpcao.dll',
'wpcsvc.dll',
'wpdshext.dll',
'wpdshserviceobj.dll',
'wpdsp.dll',
'wpdwcn.dll',
'ws2help.dll',
'ws2_32.dll',
'wscapi.dll',
'wscinterop.dll',
'wscisvif.dll',
'wscmisetup.dll',
'wscproxystub.dll',
'wsdapi.dll',
'wsdchngr.dll',
'wsecedit.dll',
'wshbth.dll',
'wshcon.dll',
'wshelper.dll',
'wshext.dll',
'wship6.dll',
'wshirda.dll',
'wshqos.dll',
'wshrm.dll',
'wshtcpip.dll',
'wsmanmigrationplugin.dll',
'wsmauto.dll',
'wsmplpxy.dll',
'wsmres.dll',
'wsmsvc.dll',
'wsmwmipl.dll',
'wsnmp32.dll',
'wsock32.dll',
'wtsapi32.dll',
'wuapi.dll',
'wudriver.dll',
'wups.dll',
'wuwebv.dll',
'wvc.dll',
'wwanapi.dll',
'wwapi.dll',
'wzcdlg.dll',
'xinput9_1_0.dll',
'xmlfilter.dll',
'xmllite.dll',
'xmlprovi.dll',
'xolehlp.dll',
'xpsfilt.dll',
'xpsgdiconverter.dll',
'xpsprint.dll',
'xpsrasterservice.dll',
'xpsservices.dll',
'xpsshhdr.dll',
'xpssvcs.dll',
'xwizards.dll',
'xwreg.dll',
'xwtpdui.dll',
'xwtpw32.dll',
'zipfldr.dll',
'msacm32.drv',
'wdmaud.drv',
'winspool.drv',
}
# set of regular expressions for additional DLLs to ignore
ignore_regexes = {
re.compile(r'^python[0-9]+\.dll$'), # included in CPython distribution
re.compile(r'^libpypy[0-9]+-c\.dll$'), # included in PyPy distribution
re.compile(r'^api-'), # let Windows handle API sets
}
# DLLs to ignore based on Python ABI tag and platform tag. For CPython, these
# are included in their respective Python distributions. For PyPy, these are
# prerequisites for PyPy to run in the first place.
ignore_by_distribution = {
'cp27m-win32': {'msvcr90.dll'},
'cp27m-win_amd64': {'msvcr90.dll'},
'cp34m-win32': {'msvcr100.dll'},
'cp34m-win_amd64': {'msvcr100.dll'},
'cp35m-win32': {'vcruntime140.dll'},
'cp35m-win_amd64': {'vcruntime140.dll'},
'cp36m-win32': {'vcruntime140.dll'},
'cp36m-win_amd64': {'vcruntime140.dll'},
'pypy36_pp73-win32': {'vcruntime140.dll'},
'cp37m-win32': {'vcruntime140.dll'},
'cp37m-win_amd64': {'vcruntime140.dll'},
'pypy37_pp73-win32': {'vcruntime140.dll'},
'cp38-win32': {'vcruntime140.dll'},
'cp38-win_amd64': {'vcruntime140.dll', 'vcruntime140_1.dll'},
'cp39-win32': {'vcruntime140.dll'},
'cp39-win_amd64': {'vcruntime140.dll', 'vcruntime140_1.dll'},
'cp310-win32': {'vcruntime140.dll'},
'cp310-win_amd64': {'vcruntime140.dll', 'vcruntime140_1.dll'},
}
# Prefixes of DLLs whose names should not be mangled. These either are
# dependencies of DLLs that contain data after the PE file proper (and thus
# cannot be modified by machomachomangler) or already have the version in the
# filename.
no_mangle_prefixes = {
'vcruntime',
'vccorlib',
'msvcp',
'msvcr',
'concrt',
'mfc',
'vcamp',
'vcomp',
}
| 20.52514
| 80
| 0.554542
| 7,026
| 66,132
| 5.206803
| 0.243097
| 0.00503
| 0.009622
| 0.01323
| 0.969767
| 0.969767
| 0.969767
| 0.966569
| 0.966569
| 0.966569
| 0
| 0.03813
| 0.244934
| 66,132
| 3,221
| 81
| 20.531512
| 0.694497
| 0.017435
| 0
| 0.987492
| 0
| 0
| 0.602602
| 0.054284
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000313
| 0
| 0.000313
| 0.002502
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a0df3f0577390d0f6569e18967ef73e91da8be2
| 2,429
|
py
|
Python
|
tests/unit/test_base64.py
|
weslambert/unfurl
|
b9a65db6842ac9f2c2e8165fc5eebfee4f2918b3
|
[
"Apache-2.0"
] | 1
|
2020-07-23T18:42:44.000Z
|
2020-07-23T18:42:44.000Z
|
tests/unit/test_base64.py
|
weslambert/unfurl
|
b9a65db6842ac9f2c2e8165fc5eebfee4f2918b3
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_base64.py
|
weslambert/unfurl
|
b9a65db6842ac9f2c2e8165fc5eebfee4f2918b3
|
[
"Apache-2.0"
] | 2
|
2020-07-06T20:27:07.000Z
|
2020-07-23T18:42:49.000Z
|
from unfurl import Unfurl
import unittest
class TestBase64(unittest.TestCase):
def test_padded_b64_ascii(self):
""" Test a simple ASCII string that is base64-encoded."""
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='dGVzdHl0ZXN0dGVzdA==')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 2)
self.assertEqual(test.total_nodes, 2)
# confirm that it was detected as b64
self.assertEqual('b64', test.nodes[2].data_type)
# confirm that text decoded correctly
self.assertEqual('testytesttest', test.nodes[2].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
def test_unpadded_b64_ascii(self):
""" Test a simple ASCII string that is base64-encoded, with padding removed."""
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='dGVzdHl0ZXN0dGVzdA')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 2)
self.assertEqual(test.total_nodes, 2)
# confirm that it was detected as b64
self.assertEqual('b64', test.nodes[2].data_type)
# confirm that text decoded correctly
self.assertEqual('testytesttest', test.nodes[2].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
def test_incorrect_padded_b64_ascii(self):
""" Test a simple ASCII string that is base64-encoded, with incorrect padding"""
test = Unfurl()
test.add_to_queue(
data_type='url', key=None,
value='dGVzdHl0ZXN0dGVzdA=')
test.parse_queue()
# check the number of nodes
self.assertEqual(len(test.nodes.keys()), 2)
self.assertEqual(test.total_nodes, 2)
# confirm that it was detected as b64
self.assertEqual('b64', test.nodes[2].data_type)
# confirm that text decoded correctly
self.assertEqual('testytesttest', test.nodes[2].value)
# make sure the queue finished empty
self.assertTrue(test.queue.empty())
self.assertEqual(len(test.edges), 0)
if __name__ == '__main__':
unittest.main()
| 30.3625
| 88
| 0.629889
| 301
| 2,429
| 4.963455
| 0.215947
| 0.150602
| 0.072289
| 0.088353
| 0.904284
| 0.904284
| 0.904284
| 0.904284
| 0.904284
| 0.904284
| 0
| 0.026228
| 0.262248
| 2,429
| 79
| 89
| 30.746835
| 0.807478
| 0.24578
| 0
| 0.731707
| 0
| 0
| 0.067703
| 0
| 0
| 0
| 0
| 0
| 0.439024
| 1
| 0.073171
| false
| 0
| 0.04878
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.